pstan committed (verified)
Commit 23abb59 · 1 Parent(s): 1282020

Upload config.json

onnx/directml/phi-3-vision-128k-instruct-int4/config.json ADDED
@@ -0,0 +1,148 @@
+ {
+   "_name_or_path": "Phi-3-vision-128k-instruct",
+   "architectures": [
+     "Phi3VForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "auto_map": {
+     "AutoConfig": "configuration_phi3_v.Phi3VConfig",
+     "AutoModelForCausalLM": "modeling_phi3_v.Phi3VForCausalLM"
+   },
+   "bos_token_id": 1,
+   "embd_layer": {
+     "embedding_cls": "image",
+     "hd_transform_order": "sub_glb",
+     "projection_cls": "mlp",
+     "use_hd_transform": true,
+     "with_learnable_separator": true
+   },
+   "eos_token_id": 2,
+   "hidden_act": "silu",
+   "hidden_size": 3072,
+   "img_processor": {
+     "image_dim_out": 1024,
+     "model_name": "openai/clip-vit-large-patch14-336",
+     "name": "clip_vision_model",
+     "num_img_tokens": 144
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 8192,
+   "max_position_embeddings": 131072,
+   "model_type": "phi3_v",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 32,
+   "original_max_position_embeddings": 4096,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "long_factor": [
+       1.0299999713897705,
+       1.0499999523162842,
+       1.0499999523162842,
+       1.0799999237060547,
+       1.2299998998641968,
+       1.2299998998641968,
+       1.2999999523162842,
+       1.4499999284744263,
+       1.5999999046325684,
+       1.6499998569488525,
+       1.8999998569488525,
+       2.859999895095825,
+       3.68999981880188,
+       5.419999599456787,
+       5.489999771118164,
+       5.489999771118164,
+       9.09000015258789,
+       11.579999923706055,
+       15.65999984741211,
+       15.769999504089355,
+       15.789999961853027,
+       18.360000610351562,
+       21.989999771118164,
+       23.079999923706055,
+       30.009998321533203,
+       32.35000228881836,
+       32.590003967285156,
+       35.56000518798828,
+       39.95000457763672,
+       53.840003967285156,
+       56.20000457763672,
+       57.95000457763672,
+       59.29000473022461,
+       59.77000427246094,
+       59.920005798339844,
+       61.190006256103516,
+       61.96000671386719,
+       62.50000762939453,
+       63.3700065612793,
+       63.48000717163086,
+       63.48000717163086,
+       63.66000747680664,
+       63.850006103515625,
+       64.08000946044922,
+       64.760009765625,
+       64.80001068115234,
+       64.81001281738281,
+       64.81001281738281
+     ],
+     "short_factor": [
+       1.05,
+       1.05,
+       1.05,
+       1.1,
+       1.1,
+       1.1,
+       1.2500000000000002,
+       1.2500000000000002,
+       1.4000000000000004,
+       1.4500000000000004,
+       1.5500000000000005,
+       1.8500000000000008,
+       1.9000000000000008,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.000000000000001,
+       2.1000000000000005,
+       2.1000000000000005,
+       2.2,
+       2.3499999999999996,
+       2.3499999999999996,
+       2.3499999999999996,
+       2.3499999999999996,
+       2.3999999999999995,
+       2.3999999999999995,
+       2.6499999999999986,
+       2.6999999999999984,
+       2.8999999999999977,
+       2.9499999999999975,
+       3.049999999999997,
+       3.049999999999997,
+       3.049999999999997
+     ],
+     "type": "su"
+   },
+   "rope_theta": 10000.0,
+   "sliding_window": 131072,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.38.1",
+   "use_cache": true,
+   "vocab_size": 32064,
+   "_attn_implementation": "flash_attention_2"
+ }
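
For reference, a minimal sketch (not part of this commit) that sanity-checks a few of the fields above using only the Python standard library. The local path is an assumption; point it at wherever the file was downloaded.

    import json

    # Assumed local path mirroring the repo layout; adjust as needed.
    with open("onnx/directml/phi-3-vision-128k-instruct-int4/config.json") as f:
        cfg = json.load(f)

    assert cfg["model_type"] == "phi3_v"
    # The 128k context window: 131072 positions, 32x the original 4096.
    assert cfg["max_position_embeddings"] == 131072
    assert cfg["original_max_position_embeddings"] == 4096

    # SuRoPE ("su") scaling applies one factor per rotary frequency, i.e.
    # head_dim / 2 entries: 3072 hidden / 32 heads = 96-dim heads -> 48 factors.
    head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]
    for key in ("long_factor", "short_factor"):
        assert len(cfg["rope_scaling"][key]) == head_dim // 2

    print("config OK:", cfg["_name_or_path"], cfg["torch_dtype"])

The "long_factor"/"short_factor" pair is what lets the model switch rotary scaling between short inputs (up to the original 4096 positions) and long inputs (up to the full 131072), which is how this config extends the context window without changing the base "rope_theta" of 10000.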