JuanMa360 committed
Commit f657b57 · 1 Parent(s): 7febbe7

refactor: model file

Files changed (2)
  1. .DS_Store +0 -0
  2. dd360-v1-3b/config.json +64 -0
.DS_Store ADDED
Binary file (6.15 kB)
 
dd360-v1-3b/config.json ADDED
@@ -0,0 +1,64 @@
+ {
+ "_name_or_path": "MILVLG/imp-v1-3b",
+ "activation_function": "gelu_new",
+ "architectures": [
+ "ImpForCausalLM"
+ ],
+ "attn_pdrop": 0.0,
+ "auto_map": {
+ "AutoConfig": "MILVLG/imp-v1-3b--configuration_imp.ImpConfig",
+ "AutoModelForCausalLM": "MILVLG/imp-v1-3b--modeling_imp.ImpForCausalLM"
+ },
+ "embd_pdrop": 0.0,
+ "eos_token_id": 50295,
+ "flash_attn": false,
+ "flash_rotary": false,
+ "freeze_mm_mlp_adapter": false,
+ "fused_dense": false,
+ "image_aspect_ratio": "square",
+ "image_token": "<image>",
+ "image_token_index": 50296,
+ "img_processor": null,
+ "initializer_range": 0.02,
+ "layer_norm_epsilon": 1e-05,
+ "mm_hidden_size": 1152,
+ "mm_projector_lr": 2e-05,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "google/siglip-so400m-patch14-384",
+ "model_type": "imp",
+ "n_embd": 2560,
+ "n_head": 32,
+ "n_head_kv": null,
+ "n_inner": null,
+ "n_layer": 32,
+ "n_positions": 3072,
+ "pad_token_id": 50256,
+ "resid_pdrop": 0.1,
+ "rotary_dim": 32,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 3072,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "float16",
+ "transformers_version": "4.38.2",
+ "use_cache": true,
+ "use_mm_proj": true,
+ "vision_tower_config": {
+ "attention_dropout": 0.0,
+ "attn_implementation": null,
+ "hidden_act": "gelu_pytorch_tanh",
+ "hidden_size": 1152,
+ "image_size": 384,
+ "intermediate_size": 4304,
+ "layer_norm_eps": 1e-06,
+ "model_type": "siglip_vision_model",
+ "num_attention_heads": 16,
+ "num_channels": 3,
+ "num_hidden_layers": 27,
+ "patch_size": 14
+ },
+ "vocab_size": 51200
+ }
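
For context, here is a minimal sketch (not part of this commit) of how a config with an auto_map like this one is typically consumed through Hugging Face transformers. The repo id "JuanMa360/dd360-v1-3b" and the subfolder "dd360-v1-3b" are assumptions inferred from the file path added in this commit, and the sketch assumes matching model weights are published alongside the config; trust_remote_code=True is required because auto_map resolves the custom ImpConfig / ImpForCausalLM classes from MILVLG/imp-v1-3b.

# Minimal sketch, assuming the config (and weights) live in a repo such as
# "JuanMa360/dd360-v1-3b" under the "dd360-v1-3b" subfolder (both assumed
# from the file path in this commit; adjust to the actual repo layout).
from transformers import AutoConfig, AutoModelForCausalLM

config = AutoConfig.from_pretrained(
    "JuanMa360/dd360-v1-3b",   # assumed repo id
    subfolder="dd360-v1-3b",   # matches the path added in this commit
    trust_remote_code=True,    # auto_map points at custom Imp code
)

model = AutoModelForCausalLM.from_pretrained(
    "JuanMa360/dd360-v1-3b",
    subfolder="dd360-v1-3b",
    config=config,
    trust_remote_code=True,
    torch_dtype="float16",     # matches "torch_dtype" in config.json
)

Loading only the AutoConfig works with just this config.json; the AutoModelForCausalLM call additionally needs the model weights to be present in the same location.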