cowWhySo committed on
Commit
0a499ed
·
verified ·
1 Parent(s): 2b4bc27

Upload folder using huggingface_hub

Browse files
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "microsoft/Phi-3-mini-4k-instruct",
3
  "architectures": [
4
  "Phi3ForCausalLM"
5
  ],
@@ -28,7 +28,7 @@
28
  "rope_theta": 10000.0,
29
  "sliding_window": 2047,
30
  "tie_word_embeddings": false,
31
- "torch_dtype": "bfloat16",
32
  "transformers_version": "4.41.1",
33
  "use_cache": true,
34
  "vocab_size": 32064
 
1
  {
2
+ "_name_or_path": "cowWhySo/Phi-3-mini-4k-instruct-Friendly",
3
  "architectures": [
4
  "Phi3ForCausalLM"
5
  ],
 
28
  "rope_theta": 10000.0,
29
  "sliding_window": 2047,
30
  "tie_word_embeddings": false,
31
+ "torch_dtype": "float16",
32
  "transformers_version": "4.41.1",
33
  "use_cache": true,
34
  "vocab_size": 32064
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:214f4e81adcf6cf15a476047c02c35fa7d180cf573f505e1abee707734f791ac
3
- size 4972489328
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:335eefd8ecf79ae672384481808d9010044117ee65f9d49e52ddddb9eb007648
3
+ size 4972489200
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:a642811ca2f514a371b1cc944977258014e0bf04a22b35694f4d58d6e07ae500
3
- size 2669692552
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3236d71667c0133862d7b3865c54e5032a20887c0a8c9d6b2c70d30119a31f30
3
+ size 2669692488