sharpenb committed
Commit 9c6aace · verified · Parent: 77bc092

Upload folder using huggingface_hub (#7)


- f4b40dda49f8515330a4d5d4806f915b22cfd3fbe91d5ea4cc3a15522ef46865 (d7c7c12fe204d7de1f40a29e753421e3aa4e97d8)
- 190e88392a8a5e6d5a67db1a6a015ed96061ac29ae3b5f7135c660166fc8fb01 (0572443b2d014428c6b7b14175b496e7dc3a1ef0)

Files changed (3)
  1. config.json +253 -7
  2. model.safetensors +1 -1
  3. smash_config.json +1 -1
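
The commit message indicates the folder was pushed with the huggingface_hub client. Below is a minimal sketch of that kind of upload using the library's standard upload_folder helper; the local path and repo id are illustrative placeholders, not the actual values behind this commit:

# Hedged sketch: pushing a local folder of model files to the Hub.
# folder_path and repo_id are placeholders.
from huggingface_hub import upload_folder

upload_folder(
    folder_path="./smashed-model",        # directory holding config.json, model.safetensors, smash_config.json
    repo_id="<namespace>/<repo-name>",    # placeholder repo id
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)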
config.json CHANGED
@@ -1,17 +1,263 @@
  {
- "_name_or_path": "/covalent/.cache/models/tmpojp4ro91_v35hdub",
+ "_name_or_path": "/covalent/.cache/models/tmp08kx47yu9qzsmm_o",
  "activation_function": "gelu_new",
  "all_reduce_scores": {
- "0": "SUCCESS",
- "1": "SUCCESS",
- "2": "SUCCESS",
+ "0": "NON_PARTICIPATING",
+ "1": "NON_PARTICIPATING",
+ "10": "NON_PARTICIPATING",
+ "100": "NON_PARTICIPATING",
+ "101": "NON_PARTICIPATING",
+ "102": "SUCCESS",
+ "103": "NON_PARTICIPATING",
+ "104": "NON_PARTICIPATING",
+ "105": "NON_PARTICIPATING",
+ "106": "NON_PARTICIPATING",
+ "107": "NON_PARTICIPATING",
+ "108": "NON_PARTICIPATING",
+ "109": "NON_PARTICIPATING",
+ "11": "SUCCESS",
+ "110": "NON_PARTICIPATING",
+ "111": "NON_PARTICIPATING",
+ "112": "NON_PARTICIPATING",
+ "113": "NON_PARTICIPATING",
+ "114": "NON_PARTICIPATING",
+ "115": "NON_PARTICIPATING",
+ "116": "NON_PARTICIPATING",
+ "117": "NON_PARTICIPATING",
+ "118": "NON_PARTICIPATING",
+ "119": "SUCCESS",
+ "12": "NON_PARTICIPATING",
+ "120": "NON_PARTICIPATING",
+ "121": "NON_PARTICIPATING",
+ "122": "NON_PARTICIPATING",
+ "123": "SUCCESS",
+ "124": "NON_PARTICIPATING",
+ "125": "NON_PARTICIPATING",
+ "126": "NON_PARTICIPATING",
+ "127": "NON_PARTICIPATING",
+ "128": "SUCCESS",
+ "129": "NON_PARTICIPATING",
+ "13": "NON_PARTICIPATING",
+ "130": "NON_PARTICIPATING",
+ "131": "NON_PARTICIPATING",
+ "132": "NON_PARTICIPATING",
+ "133": "NON_PARTICIPATING",
+ "134": "NON_PARTICIPATING",
+ "135": "NON_PARTICIPATING",
+ "136": "NON_PARTICIPATING",
+ "137": "NON_PARTICIPATING",
+ "138": "NON_PARTICIPATING",
+ "139": "NON_PARTICIPATING",
+ "14": "NON_PARTICIPATING",
+ "140": "SUCCESS",
+ "141": "NON_PARTICIPATING",
+ "142": "NON_PARTICIPATING",
+ "143": "NON_PARTICIPATING",
+ "144": "NON_PARTICIPATING",
+ "145": "NON_PARTICIPATING",
+ "146": "NON_PARTICIPATING",
+ "147": "SUCCESS",
+ "148": "NON_PARTICIPATING",
+ "149": "SUCCESS",
+ "15": "NON_PARTICIPATING",
+ "150": "NON_PARTICIPATING",
+ "151": "SUCCESS",
+ "152": "NON_PARTICIPATING",
+ "153": "NON_PARTICIPATING",
+ "154": "NON_PARTICIPATING",
+ "155": "NON_PARTICIPATING",
+ "156": "NON_PARTICIPATING",
+ "157": "NON_PARTICIPATING",
+ "158": "NON_PARTICIPATING",
+ "159": "NON_PARTICIPATING",
+ "16": "SUCCESS",
+ "160": "SUCCESS",
+ "161": "SUCCESS",
+ "162": "SUCCESS",
+ "163": "NON_PARTICIPATING",
+ "164": "SUCCESS",
+ "165": "NON_PARTICIPATING",
+ "166": "NON_PARTICIPATING",
+ "167": "NON_PARTICIPATING",
+ "168": "NON_PARTICIPATING",
+ "169": "NON_PARTICIPATING",
+ "17": "NON_PARTICIPATING",
+ "170": "SUCCESS",
+ "171": "NON_PARTICIPATING",
+ "172": "SUCCESS",
+ "173": "NON_PARTICIPATING",
+ "174": "NON_PARTICIPATING",
+ "175": "NON_PARTICIPATING",
+ "176": "NON_PARTICIPATING",
+ "177": "NON_PARTICIPATING",
+ "178": "NON_PARTICIPATING",
+ "179": "NON_PARTICIPATING",
+ "18": "NON_PARTICIPATING",
+ "180": "NON_PARTICIPATING",
+ "181": "NON_PARTICIPATING",
+ "182": "NON_PARTICIPATING",
+ "183": "NON_PARTICIPATING",
+ "184": "NON_PARTICIPATING",
+ "185": "NON_PARTICIPATING",
+ "186": "NON_PARTICIPATING",
+ "187": "NON_PARTICIPATING",
+ "188": "NON_PARTICIPATING",
+ "189": "NON_PARTICIPATING",
+ "19": "NON_PARTICIPATING",
+ "190": "NON_PARTICIPATING",
+ "191": "NON_PARTICIPATING",
+ "192": "NON_PARTICIPATING",
+ "193": "NON_PARTICIPATING",
+ "194": "NON_PARTICIPATING",
+ "195": "NON_PARTICIPATING",
+ "196": "NON_PARTICIPATING",
+ "197": "SUCCESS",
+ "198": "NON_PARTICIPATING",
+ "199": "NON_PARTICIPATING",
+ "2": "NON_PARTICIPATING",
+ "20": "NON_PARTICIPATING",
+ "200": "NON_PARTICIPATING",
+ "201": "SUCCESS",
+ "202": "NON_PARTICIPATING",
+ "203": "NON_PARTICIPATING",
+ "204": "NON_PARTICIPATING",
+ "205": "NON_PARTICIPATING",
+ "206": "NON_PARTICIPATING",
+ "207": "NON_PARTICIPATING",
+ "208": "NON_PARTICIPATING",
+ "209": "SUCCESS",
+ "21": "SUCCESS",
+ "210": "NON_PARTICIPATING",
+ "211": "SUCCESS",
+ "212": "SUCCESS",
+ "213": "SUCCESS",
+ "214": "NON_PARTICIPATING",
+ "215": "NON_PARTICIPATING",
+ "216": "NON_PARTICIPATING",
+ "217": "NON_PARTICIPATING",
+ "218": "NON_PARTICIPATING",
+ "219": "NON_PARTICIPATING",
+ "22": "NON_PARTICIPATING",
+ "220": "SUCCESS",
+ "221": "NON_PARTICIPATING",
+ "222": "NON_PARTICIPATING",
+ "223": "NON_PARTICIPATING",
+ "224": "NON_PARTICIPATING",
+ "225": "NON_PARTICIPATING",
+ "226": "NON_PARTICIPATING",
+ "227": "SUCCESS",
+ "228": "SUCCESS",
+ "229": "SUCCESS",
+ "23": "NON_PARTICIPATING",
+ "230": "SUCCESS",
+ "231": "SUCCESS",
+ "232": "SUCCESS",
+ "233": "NON_PARTICIPATING",
+ "234": "NON_PARTICIPATING",
+ "235": "NON_PARTICIPATING",
+ "236": "SUCCESS",
+ "237": "SUCCESS",
+ "238": "NON_PARTICIPATING",
+ "239": "NON_PARTICIPATING",
+ "24": "NON_PARTICIPATING",
+ "240": "NON_PARTICIPATING",
+ "241": "NON_PARTICIPATING",
+ "242": "NON_PARTICIPATING",
+ "243": "NON_PARTICIPATING",
+ "244": "SUCCESS",
+ "245": "NON_PARTICIPATING",
+ "246": "NON_PARTICIPATING",
+ "247": "NON_PARTICIPATING",
+ "248": "SUCCESS",
+ "249": "NON_PARTICIPATING",
+ "25": "SUCCESS",
+ "250": "NON_PARTICIPATING",
+ "251": "NON_PARTICIPATING",
+ "252": "NON_PARTICIPATING",
+ "253": "NON_PARTICIPATING",
+ "254": "NON_PARTICIPATING",
+ "255": "NON_PARTICIPATING",
+ "26": "SUCCESS",
+ "27": "NON_PARTICIPATING",
+ "28": "NON_PARTICIPATING",
+ "29": "SUCCESS",
  "3": "NON_PARTICIPATING",
- "4": "NON_PARTICIPATING",
+ "30": "NON_PARTICIPATING",
+ "31": "NON_PARTICIPATING",
+ "32": "NON_PARTICIPATING",
+ "33": "NON_PARTICIPATING",
+ "34": "NON_PARTICIPATING",
+ "35": "NON_PARTICIPATING",
+ "36": "NON_PARTICIPATING",
+ "37": "NON_PARTICIPATING",
+ "38": "NON_PARTICIPATING",
+ "39": "NON_PARTICIPATING",
+ "4": "SUCCESS",
+ "40": "NON_PARTICIPATING",
+ "41": "NON_PARTICIPATING",
+ "42": "NON_PARTICIPATING",
+ "43": "NON_PARTICIPATING",
+ "44": "NON_PARTICIPATING",
+ "45": "NON_PARTICIPATING",
+ "46": "NON_PARTICIPATING",
+ "47": "NON_PARTICIPATING",
+ "48": "NON_PARTICIPATING",
+ "49": "SUCCESS",
  "5": "NON_PARTICIPATING",
+ "50": "SUCCESS",
+ "51": "NON_PARTICIPATING",
+ "52": "NON_PARTICIPATING",
+ "53": "NON_PARTICIPATING",
+ "54": "NON_PARTICIPATING",
+ "55": "NON_PARTICIPATING",
+ "56": "NON_PARTICIPATING",
+ "57": "NON_PARTICIPATING",
+ "58": "NON_PARTICIPATING",
+ "59": "SUCCESS",
  "6": "NON_PARTICIPATING",
- "7": "NON_PARTICIPATING",
+ "60": "NON_PARTICIPATING",
+ "61": "SUCCESS",
+ "62": "NON_PARTICIPATING",
+ "63": "NON_PARTICIPATING",
+ "64": "NON_PARTICIPATING",
+ "65": "NON_PARTICIPATING",
+ "66": "NON_PARTICIPATING",
+ "67": "NON_PARTICIPATING",
+ "68": "SUCCESS",
+ "69": "NON_PARTICIPATING",
+ "7": "SUCCESS",
+ "70": "NON_PARTICIPATING",
+ "71": "NON_PARTICIPATING",
+ "72": "NON_PARTICIPATING",
+ "73": "NON_PARTICIPATING",
+ "74": "NON_PARTICIPATING",
+ "75": "SUCCESS",
+ "76": "NON_PARTICIPATING",
+ "77": "SUCCESS",
+ "78": "NON_PARTICIPATING",
+ "79": "NON_PARTICIPATING",
  "8": "NON_PARTICIPATING",
- "9": "NON_PARTICIPATING"
+ "80": "NON_PARTICIPATING",
+ "81": "NON_PARTICIPATING",
+ "82": "NON_PARTICIPATING",
+ "83": "NON_PARTICIPATING",
+ "84": "NON_PARTICIPATING",
+ "85": "SUCCESS",
+ "86": "NON_PARTICIPATING",
+ "87": "NON_PARTICIPATING",
+ "88": "NON_PARTICIPATING",
+ "89": "NON_PARTICIPATING",
+ "9": "NON_PARTICIPATING",
+ "90": "NON_PARTICIPATING",
+ "91": "NON_PARTICIPATING",
+ "92": "NON_PARTICIPATING",
+ "93": "SUCCESS",
+ "94": "NON_PARTICIPATING",
+ "95": "NON_PARTICIPATING",
+ "96": "SUCCESS",
+ "97": "NON_PARTICIPATING",
+ "98": "NON_PARTICIPATING",
+ "99": "NON_PARTICIPATING"
  },
  "architectures": [
  "GPTOptim"
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:db41930bdd61467aa1ba5067999aae21152cdefb129cae5101d4fb6a3345361c
+ oid sha256:b8a5da802414279a12d92400d78f2a170f6fe9be886d444d56599c9e1ca53e7a
  size 2306069384
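
model.safetensors is stored via Git LFS, so the diff only changes the pointer's sha256 oid; the file size is unchanged. A hedged sketch for checking a downloaded copy against the new oid (the local path is a placeholder):

# Hedged sketch: verify a downloaded model.safetensors against the LFS oid above.
import hashlib

def file_sha256(path, chunk_size=1 << 20):
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "b8a5da802414279a12d92400d78f2a170f6fe9be886d444d56599c9e1ca53e7a"
assert file_sha256("model.safetensors") == expected   # placeholder local path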
smash_config.json CHANGED
@@ -28,7 +28,7 @@
  "quant_llm-int8_weight_bits": 8,
  "max_batch_size": 1,
  "device": "cuda",
- "cache_dir": "/covalent/.cache/models/tmpojp4ro91",
+ "cache_dir": "/covalent/.cache/models/tmp08kx47yu",
  "task": "",
  "save_load_fn": "bitsandbytes",
  "save_load_fn_args": {}