qgallouedec (HF Staff) committed (verified)
Commit 90c7543 · 1 Parent(s): 97f190e

Upload LlavaNextForConditionalGeneration

Files changed (3):
  1. config.json +5 -3
  2. generation_config.json +1 -1
  3. model.safetensors +1 -1
config.json CHANGED

@@ -2,6 +2,7 @@
   "architectures": [
     "LlavaNextForConditionalGeneration"
   ],
+  "dtype": "bfloat16",
   "ignore_index": -100,
   "image_grid_pinpoints": [
     [
@@ -36,11 +37,13 @@
       "MistralForCausalLM"
     ],
     "attention_dropout": 0.0,
+    "dtype": "bfloat16",
     "head_dim": null,
     "hidden_act": "silu",
     "hidden_size": 16,
     "initializer_range": 0.02,
     "intermediate_size": 14336,
+    "layer_types": null,
     "max_position_embeddings": 32768,
     "model_type": "mistral",
     "num_attention_heads": 4,
@@ -49,16 +52,15 @@
     "rms_norm_eps": 1e-05,
     "rope_theta": 1000000.0,
     "sliding_window": null,
-    "torch_dtype": "bfloat16",
     "use_cache": true,
     "vocab_size": 32064
   },
   "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.56.0.dev0",
+  "transformers_version": "4.57.0.dev0",
   "use_image_newline_parameter": true,
   "vision_config": {
     "attention_dropout": 0.0,
+    "embed_dim": 32,
     "hidden_act": "quick_gelu",
     "hidden_size": 16,
     "image_size": 336,
generation_config.json CHANGED

@@ -2,5 +2,5 @@
   "_from_model_config": true,
   "bos_token_id": 1,
   "eos_token_id": 2,
-  "transformers_version": "4.56.0.dev0"
+  "transformers_version": "4.57.0.dev0"
 }
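
The regenerated generation_config.json only bumps the writer-version metadata. A quick, hedged sketch (same placeholder repo id) to confirm nothing behavioral changed:

    from transformers import GenerationConfig

    gen_cfg = GenerationConfig.from_pretrained("<namespace>/<model-repo>", revision="90c7543")

    # Token ids are untouched; the version string only records the transformers
    # build that wrote the file.
    print(gen_cfg.bos_token_id, gen_cfg.eos_token_id)   # 1, 2
    print(gen_cfg.transformers_version)                 # "4.57.0.dev0"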
model.safetensors CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:de69f2ae14700ea96ced1956f7613d0e2f245831b3bbb7cb91de653ddebbcc38
+oid sha256:346f654b90cd0fafed9c3417b66251d23252047a55a994e5c699803f3d7b7317
 size 5399728
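
The model.safetensors LFS pointer changed only in its content hash (the size is identical). A sketch, assuming the same placeholder repo id, for verifying a downloaded blob against the new SHA-256 recorded in the pointer:

    import hashlib
    import os
    from huggingface_hub import hf_hub_download

    path = hf_hub_download(repo_id="<namespace>/<model-repo>",
                           filename="model.safetensors", revision="90c7543")

    # Recompute the blob hash and compare it with the oid in the new LFS pointer.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)

    expected = "346f654b90cd0fafed9c3417b66251d23252047a55a994e5c699803f3d7b7317"
    print(h.hexdigest() == expected)   # True if the download matches this commit
    print(os.path.getsize(path))       # 5399728 bytes, unchanged by this commit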