Upload folder using huggingface_hub (#4)
- 9caece07fb669021407613908b6d446e2256577093eae42ab22077c1e7cc83f9 (b5fb9e78a35c71128f468c35c393ee336046f0da)
- 996dc59ff5f72438e0e2eb6cf8718a5a84689871a55b111d71a0217e89dd0ea3 (ec5d98d9eb1979b9b1c014ef8a15990ab5a184d4)
- d615c835ada5f31aec3bd4531531f06dc0a94ab59491cdc0cd307beb15e58e51 (70503c841059b4356b469339db3968436977e9e7)
- 198bdcd211d1ffcc79469f3184053185cc7964167a008a27f46549bb6909cf79 (da93a52c2ca6006a8a62ab05074b61088a2d615b)
- 3266ecd9655d1adbed198bf1bc7f49fcc5d92d48cfd344c57e1e96394ce2176e (b5de5f0130fbb1c51bafb9ce8fbef6eaee8cff10)
- config.json +4 -2
- generation_config.json +1 -1
- model-00001-of-00004.safetensors +3 -0
- model-00002-of-00004.safetensors +3 -0
- model-00003-of-00004.safetensors +3 -0
- model-00004-of-00004.safetensors +3 -0
- model.safetensors.index.json +0 -0
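For reference, a commit like this one can be produced with the `upload_folder` API of `huggingface_hub`, as the title suggests. The snippet below is a minimal sketch; the local folder path and repository id are placeholders, not values taken from this commit.

```python
from huggingface_hub import HfApi

api = HfApi()

# Upload every file in a local folder as a single commit / pull request.
# "path/to/local/folder" and "user/model-repo" are placeholders.
api.upload_folder(
    folder_path="path/to/local/folder",
    repo_id="user/model-repo",
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
    create_pr=True,  # open a pull request instead of committing to main
)
```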
config.json
CHANGED
@@ -4,6 +4,7 @@
     "LlamaForCausalLM"
   ],
   "attention_bias": false,
+  "attention_dropout": 0.0,
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "silu",
@@ -26,14 +27,15 @@
     "model_version": 1,
     "outlier_channel_split": false,
     "packsz": 4,
-    "rescale_WH": false
+    "rescale_WH": false,
+    "resid_scale_override": -1
   },
   "rms_norm_eps": 1e-05,
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.
+  "transformers_version": "4.36.2",
   "use_cache": true,
   "vocab_size": 32000
 }
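To inspect the updated fields without cloning the whole repository, the config can be fetched and read directly. A minimal sketch using `hf_hub_download`, with a placeholder repository id:

```python
import json

from huggingface_hub import hf_hub_download

# "user/model-repo" is a placeholder for this repository's id.
config_path = hf_hub_download(repo_id="user/model-repo", filename="config.json")

with open(config_path) as f:
    config = json.load(f)

# Top-level fields touched by this commit.
print(config["attention_dropout"])      # 0.0
print(config["transformers_version"])   # "4.36.2"
# rescale_WH and resid_scale_override sit inside the nested
# quantization block shown in the second hunk above.
```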
generation_config.json
CHANGED
@@ -6,5 +6,5 @@
   "pad_token_id": 0,
   "temperature": 0.6,
   "top_p": 0.9,
-  "transformers_version": "4.
+  "transformers_version": "4.36.2"
 }
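These defaults map directly onto transformers' `GenerationConfig`, which is loaded automatically at generation time. A minimal sketch, with a placeholder repository id:

```python
from transformers import GenerationConfig

# Load the generation defaults shipped with the model.
gen_config = GenerationConfig.from_pretrained("user/model-repo")

print(gen_config.temperature)   # 0.6
print(gen_config.top_p)         # 0.9
print(gen_config.pad_token_id)  # 0
```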
model-00001-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ef6b7f34bf0f64297a78f7f29cdf65fa8555be48a2f533306d3a506efe293a04
+size 4964068056
model-00002-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dcee277070f9bbfee6ee1a29017626160e35ed667b7a9da64ff009775365dadd
+size 4985874080
model-00003-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bfb5821eca0bb1c08df6c87f0a9737f5c7c4f4abebfa946980bf1be360a6c568
+size 4964866176
model-00004-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4216bcdd81a2169f8b7545f800ba194870371294fcabc0de2909f9fb9b5d1974
+size 3271337328
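Each of the four shards is stored through Git LFS, so the diff only shows a pointer file (spec version, sha256 oid, byte size). A downloaded shard can be checked against its pointer; a minimal sketch using the oid and size of the first shard, with a placeholder local path:

```python
import hashlib
import os

# Placeholder local path; oid and size come from the shard-1 pointer above.
path = "model-00001-of-00004.safetensors"
expected_oid = "ef6b7f34bf0f64297a78f7f29cdf65fa8555be48a2f533306d3a506efe293a04"
expected_size = 4964068056

sha256 = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha256.update(chunk)

assert os.path.getsize(path) == expected_size
assert sha256.hexdigest() == expected_oid
print("shard matches its LFS pointer")
```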
model.safetensors.index.json
CHANGED
The diff for this file is too large to render. See raw diff.
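model.safetensors.index.json maps every tensor name to the shard file that stores it, which is how a four-way sharded checkpoint like this one is resolved at load time. A minimal sketch reading a local copy of the index; the tensor name queried at the end is illustrative:

```python
import json
from collections import Counter

with open("model.safetensors.index.json") as f:
    index = json.load(f)

# weight_map: {tensor name -> shard filename}
weight_map = index["weight_map"]

# Count how many tensors each of the four shards holds.
print(Counter(weight_map.values()))

# Look up the shard holding one tensor (name is illustrative).
print(weight_map.get("model.embed_tokens.weight"))
```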