KabilanM committed on
Commit 6cd1469 · 1 Parent(s): 0eef622

Delete config.json

Files changed (1)
  1. config.json +0 -141
config.json DELETED
@@ -1,141 +0,0 @@
- {
-   "_commit_hash": "70120ba84d68ca1211e007c4fb61d0cd5424be54",
-   "_name_or_path": "facebook/detr-resnet-50",
-   "activation_dropout": 0.0,
-   "activation_function": "relu",
-   "architectures": [
-     "DetrForObjectDetection"
-   ],
-   "attention_dropout": 0.0,
-   "auxiliary_loss": false,
-   "backbone": null,
-   "backbone_config": {
-     "_name_or_path": "",
-     "add_cross_attention": false,
-     "architectures": null,
-     "bad_words_ids": null,
-     "begin_suppress_tokens": null,
-     "bos_token_id": null,
-     "chunk_size_feed_forward": 0,
-     "cross_attention_hidden_size": null,
-     "decoder_start_token_id": null,
-     "depths": [
-       3,
-       4,
-       6,
-       3
-     ],
-     "diversity_penalty": 0.0,
-     "do_sample": false,
-     "downsample_in_first_stage": false,
-     "early_stopping": false,
-     "embedding_size": 64,
-     "encoder_no_repeat_ngram_size": 0,
-     "eos_token_id": null,
-     "exponential_decay_length_penalty": null,
-     "finetuning_task": null,
-     "forced_bos_token_id": null,
-     "forced_eos_token_id": null,
-     "hidden_act": "relu",
-     "hidden_sizes": [
-       256,
-       512,
-       1024,
-       2048
-     ],
-     "id2label": {
-       "0": "LABEL_0",
-       "1": "LABEL_1"
-     },
-     "is_decoder": false,
-     "is_encoder_decoder": false,
-     "label2id": {
-       "LABEL_0": 0,
-       "LABEL_1": 1
-     },
-     "layer_type": "bottleneck",
-     "length_penalty": 1.0,
-     "max_length": 20,
-     "min_length": 0,
-     "model_type": "resnet",
-     "no_repeat_ngram_size": 0,
-     "num_beam_groups": 1,
-     "num_beams": 1,
-     "num_channels": 3,
-     "num_return_sequences": 1,
-     "out_features": [
-       "stage4"
-     ],
-     "output_attentions": false,
-     "output_hidden_states": false,
-     "output_scores": false,
-     "pad_token_id": null,
-     "prefix": null,
-     "problem_type": null,
-     "pruned_heads": {},
-     "remove_invalid_values": false,
-     "repetition_penalty": 1.0,
-     "return_dict": true,
-     "return_dict_in_generate": false,
-     "sep_token_id": null,
-     "stage_names": [
-       "stem",
-       "stage1",
-       "stage2",
-       "stage3",
-       "stage4"
-     ],
-     "suppress_tokens": null,
-     "task_specific_params": null,
-     "temperature": 1.0,
-     "tf_legacy_loss": false,
-     "tie_encoder_decoder": false,
-     "tie_word_embeddings": true,
-     "tokenizer_class": null,
-     "top_k": 50,
-     "top_p": 1.0,
-     "torch_dtype": null,
-     "torchscript": false,
-     "transformers_version": "4.27.0",
-     "typical_p": 1.0,
-     "use_bfloat16": false
-   },
-   "bbox_cost": 5,
-   "bbox_loss_coefficient": 5,
-   "class_cost": 1,
-   "d_model": 256,
-   "decoder_attention_heads": 8,
-   "decoder_ffn_dim": 2048,
-   "decoder_layerdrop": 0.0,
-   "decoder_layers": 6,
-   "dice_loss_coefficient": 1,
-   "dilation": null,
-   "dropout": 0.1,
-   "encoder_attention_heads": 8,
-   "encoder_ffn_dim": 2048,
-   "encoder_layerdrop": 0.0,
-   "encoder_layers": 6,
-   "eos_coefficient": 0.1,
-   "giou_cost": 2,
-   "giou_loss_coefficient": 2,
-   "id2label": {
-     "0": "LABEL_0"
-   },
-   "init_std": 0.02,
-   "init_xavier_std": 1.0,
-   "is_encoder_decoder": true,
-   "label2id": {
-     "LABEL_0": 0
-   },
-   "mask_loss_coefficient": 1,
-   "model_type": "detr",
-   "num_channels": 3,
-   "num_hidden_layers": 6,
-   "num_queries": 100,
-   "position_embedding_type": "sine",
-   "scale_embedding": false,
-   "torch_dtype": "float32",
-   "transformers_version": null,
-   "use_pretrained_backbone": null,
-   "use_timm_backbone": false
- }
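For reference, a minimal sketch of how a configuration like the deleted config.json is typically recreated with the Hugging Face transformers library: it starts from the base facebook/detr-resnet-50 config and overrides the single-label mapping seen in the deleted file. The exact overrides used in the original fine-tuning setup are an assumption; this is illustrative only, not the repository's own code.

```python
from transformers import DetrConfig, DetrForObjectDetection

# Start from the base DETR ResNet-50 configuration and override the label
# mapping, mirroring the one-label id2label/label2id in the deleted config.json.
# (Assumed reconstruction; the original training script is not shown here.)
config = DetrConfig.from_pretrained(
    "facebook/detr-resnet-50",
    num_labels=1,
    id2label={0: "LABEL_0"},
    label2id={"LABEL_0": 0},
)

# Instantiating from a config alone gives randomly initialised weights;
# DetrForObjectDetection.from_pretrained(...) would load pretrained ones instead.
model = DetrForObjectDetection(config)
print(model.config.num_queries)  # 100, matching the deleted file
```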