Update config.json
config.json CHANGED (+2, −18)
@@ -4,7 +4,7 @@
     "LlavaForConditionalGeneration"
   ],
   "ignore_index": -100,
-  "image_seq_length":
+  "image_seq_length": 1,
   "image_token_index": 10,
   "model_type": "llava",
   "projector_hidden_act": "gelu",
@@ -232,28 +232,12 @@
     "quantization_status": "compressed"
   },
   "text_config": {
-    "add_cross_attention": false,
-    "bos_token_id": 1,
-    "eos_token_id": 2,
-    "head_dim": 128,
-    "hidden_act": "silu",
     "hidden_size": 5120,
-    "id2label": {
-      "0": "LABEL_0",
-      "1": "LABEL_1"
-    },
-    "initializer_range": 0.02,
+    "head_dim": 128,
     "intermediate_size": 14336,
     "is_composition": true,
-    "is_decoder": false,
-    "is_encoder_decoder": false,
-    "label2id": {
-      "LABEL_0": 0,
-      "LABEL_1": 1
-    },
     "max_position_embeddings": 1024000,
     "model_type": "mistral",
-    "num_attention_heads": 32,
     "num_hidden_layers": 40,
     "num_key_value_heads": 8,
     "rms_norm_eps": 1e-05,
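In short: the commit sets `image_seq_length` to 1, moves `head_dim: 128` into the nested `text_config` (Mistral) block, and prunes generic PretrainedConfig defaults (`id2label`, `label2id`, `is_decoder`, etc.) that carry no model-specific information. As a minimal sketch of how the updated fields surface after loading, assuming transformers' `AutoConfig` and a placeholder repo id (not taken from this commit):

```python
from transformers import AutoConfig

# Placeholder repo id; substitute the actual model repository.
cfg = AutoConfig.from_pretrained("org/model-repo")

# Top-level LLaVA field changed in this commit.
print(cfg.image_seq_length)        # 1

# head_dim now lives under the nested text_config (Mistral) block.
print(cfg.text_config.head_dim)    # 128
print(cfg.text_config.model_type)  # "mistral"
```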