KabilanM committed
Commit 38a7785 · 1 Parent(s): d1da0ad

Delete config.json

Files changed (1)
  1. config.json +0 -139
config.json DELETED
@@ -1,139 +0,0 @@
- {
-   "_commit_hash": "c47ba6c3f6c346a9e64538186c560de2d7eba777",
-   "_name_or_path": "spark-ds549/detr-label-detection",
-   "activation_dropout": 0.0,
-   "activation_function": "relu",
-   "architectures": [
-     "DetrModel"
-   ],
-   "attention_dropout": 0.0,
-   "auxiliary_loss": false,
-   "backbone": null,
-   "backbone_config": {
-     "_name_or_path": "",
-     "add_cross_attention": false,
-     "architectures": null,
-     "bad_words_ids": null,
-     "begin_suppress_tokens": null,
-     "bos_token_id": null,
-     "chunk_size_feed_forward": 0,
-     "cross_attention_hidden_size": null,
-     "decoder_start_token_id": null,
-     "depths": [
-       3,
-       4,
-       6,
-       3
-     ],
-     "diversity_penalty": 0.0,
-     "do_sample": false,
-     "downsample_in_first_stage": false,
-     "early_stopping": false,
-     "embedding_size": 64,
-     "encoder_no_repeat_ngram_size": 0,
-     "eos_token_id": null,
-     "exponential_decay_length_penalty": null,
-     "finetuning_task": null,
-     "forced_bos_token_id": null,
-     "forced_eos_token_id": null,
-     "hidden_act": "relu",
-     "hidden_sizes": [
-       256,
-       512,
-       1024,
-       2048
-     ],
-     "id2label": {
-       "0": "Label"
-     },
-     "is_decoder": false,
-     "is_encoder_decoder": false,
-     "label2id": {
-       "Label": 0
-     },
-     "layer_type": "bottleneck",
-     "length_penalty": 1.0,
-     "max_length": 20,
-     "min_length": 0,
-     "model_type": "resnet",
-     "no_repeat_ngram_size": 0,
-     "num_beam_groups": 1,
-     "num_beams": 1,
-     "num_channels": 3,
-     "num_return_sequences": 1,
-     "out_features": [
-       "stage4"
-     ],
-     "output_attentions": false,
-     "output_hidden_states": false,
-     "output_scores": false,
-     "pad_token_id": null,
-     "prefix": null,
-     "problem_type": null,
-     "pruned_heads": {},
-     "remove_invalid_values": false,
-     "repetition_penalty": 1.0,
-     "return_dict": true,
-     "return_dict_in_generate": false,
-     "sep_token_id": null,
-     "stage_names": [
-       "stem",
-       "stage1",
-       "stage2",
-       "stage3",
-       "stage4"
-     ],
-     "suppress_tokens": null,
-     "task_specific_params": null,
-     "temperature": 1.0,
-     "tf_legacy_loss": false,
-     "tie_encoder_decoder": false,
-     "tie_word_embeddings": true,
-     "tokenizer_class": null,
-     "top_k": 50,
-     "top_p": 1.0,
-     "torch_dtype": null,
-     "torchscript": false,
-     "transformers_version": "4.27.0",
-     "typical_p": 1.0,
-     "use_bfloat16": false
-   },
-   "bbox_cost": 5,
-   "bbox_loss_coefficient": 5,
-   "class_cost": 1,
-   "d_model": 256,
-   "decoder_attention_heads": 8,
-   "decoder_ffn_dim": 2048,
-   "decoder_layerdrop": 0.0,
-   "decoder_layers": 6,
-   "dice_loss_coefficient": 1,
-   "dilation": null,
-   "dropout": 0.1,
-   "encoder_attention_heads": 8,
-   "encoder_ffn_dim": 2048,
-   "encoder_layerdrop": 0.0,
-   "encoder_layers": 6,
-   "eos_coefficient": 0.1,
-   "giou_cost": 2,
-   "giou_loss_coefficient": 2,
-   "id2label": {
-     "0": "Label"
-   },
-   "init_std": 0.02,
-   "init_xavier_std": 1.0,
-   "is_encoder_decoder": true,
-   "label2id": {
-     "Label": 0
-   },
-   "mask_loss_coefficient": 1,
-   "model_type": "detr",
-   "num_channels": 3,
-   "num_hidden_layers": 6,
-   "num_queries": 100,
-   "position_embedding_type": "sine",
-   "scale_embedding": false,
-   "torch_dtype": "float32",
-   "transformers_version": null,
-   "use_pretrained_backbone": null,
-   "use_timm_backbone": false
- }
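
The file removed here is the standard Hugging Face config.json for a DetrModel with a ResNet backbone (model_type "detr", backbone model_type "resnet", a single "Label" class). As a minimal sketch of how the configuration could still be recovered after this deletion, assuming the parent revision d1da0ad listed above still contains config.json (the repo id and revision come from this commit; everything else is illustrative):

from transformers import DetrConfig, DetrModel

REPO_ID = "spark-ds549/detr-label-detection"
PARENT_REVISION = "d1da0ad"  # parent commit of this change, assumed to still hold config.json

# Pinning `revision` loads the config as it existed before the deletion.
config = DetrConfig.from_pretrained(REPO_ID, revision=PARENT_REVISION)
print(config.model_type)                  # "detr"
print(config.num_queries)                 # 100, per the deleted file
print(config.backbone_config.model_type)  # "resnet"
print(config.id2label)                    # {0: "Label"}

# The model weights can be loaded against the same pinned revision.
model = DetrModel.from_pretrained(REPO_ID, revision=PARENT_REVISION)

Without a config.json at the head of the repository, from_pretrained calls that do not pin a revision will generally fail, which is the practical effect of this commit.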