Commit d2b56aa
Parent(s): 661f41d

Initial commit

This view is limited to 50 files because it contains too many changes.
- .gitattributes +3 -0
- clip/model.onnx +3 -0
- embeds/EtGlowExecutionProvider_GLOW_graph_Extracted_from_-main_graph-_4608082067069852140_0_0_0.onnx +3 -0
- embeds/EtGlowExecutionProvider_GLOW_graph_Extracted_from_-main_graph-_9154130920003627498_0_0_0.onnx +3 -0
- embeds/model.embed_tokens.weight +3 -0
- embeds/model.onnx +3 -0
- llama/EtGlowExecutionProvider_GLOW_graph_Extracted_from_-Extracted_from_-Extracted_from_-Extracted_from_-Extracted_from_-main_graph-----_3732785077968901946_0_0_0.onnx +3 -0
- llama/model.embed_tokens.weight +3 -0
- llama/model.layers.0.input_layernorm.weight +3 -0
- llama/model.layers.0.post_attention_layernorm.weight +3 -0
- llama/model.layers.1.input_layernorm.weight +3 -0
- llama/model.layers.1.post_attention_layernorm.weight +3 -0
- llama/model.layers.10.input_layernorm.weight +3 -0
- llama/model.layers.10.post_attention_layernorm.weight +3 -0
- llama/model.layers.11.input_layernorm.weight +3 -0
- llama/model.layers.11.post_attention_layernorm.weight +3 -0
- llama/model.layers.12.input_layernorm.weight +3 -0
- llama/model.layers.12.post_attention_layernorm.weight +3 -0
- llama/model.layers.13.input_layernorm.weight +3 -0
- llama/model.layers.13.post_attention_layernorm.weight +3 -0
- llama/model.layers.14.input_layernorm.weight +3 -0
- llama/model.layers.14.post_attention_layernorm.weight +3 -0
- llama/model.layers.15.input_layernorm.weight +3 -0
- llama/model.layers.15.post_attention_layernorm.weight +3 -0
- llama/model.layers.16.input_layernorm.weight +3 -0
- llama/model.layers.16.post_attention_layernorm.weight +3 -0
- llama/model.layers.17.input_layernorm.weight +3 -0
- llama/model.layers.17.post_attention_layernorm.weight +3 -0
- llama/model.layers.18.input_layernorm.weight +3 -0
- llama/model.layers.18.post_attention_layernorm.weight +3 -0
- llama/model.layers.19.input_layernorm.weight +3 -0
- llama/model.layers.19.post_attention_layernorm.weight +3 -0
- llama/model.layers.2.input_layernorm.weight +3 -0
- llama/model.layers.2.post_attention_layernorm.weight +3 -0
- llama/model.layers.20.input_layernorm.weight +3 -0
- llama/model.layers.20.post_attention_layernorm.weight +3 -0
- llama/model.layers.21.input_layernorm.weight +3 -0
- llama/model.layers.21.post_attention_layernorm.weight +3 -0
- llama/model.layers.22.input_layernorm.weight +3 -0
- llama/model.layers.22.post_attention_layernorm.weight +3 -0
- llama/model.layers.23.input_layernorm.weight +3 -0
- llama/model.layers.23.post_attention_layernorm.weight +3 -0
- llama/model.layers.24.input_layernorm.weight +3 -0
- llama/model.layers.24.post_attention_layernorm.weight +3 -0
- llama/model.layers.25.input_layernorm.weight +3 -0
- llama/model.layers.25.post_attention_layernorm.weight +3 -0
- llama/model.layers.26.input_layernorm.weight +3 -0
- llama/model.layers.26.post_attention_layernorm.weight +3 -0
- llama/model.layers.27.input_layernorm.weight +3 -0
- llama/model.layers.27.post_attention_layernorm.weight +3 -0
.gitattributes
CHANGED
@@ -33,3 +33,6 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+embeds/model* filter=lfs diff=lfs merge=lfs -text
+llama/onnx_* filter=lfs diff=lfs merge=lfs -text
+llama/model* filter=lfs diff=lfs merge=lfs -text
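The three added attribute lines route paths matching embeds/model*, llama/onnx_*, and llama/model* through the Git LFS filter, which is why the files added below appear as small pointer stubs rather than binaries. A rough, illustrative sketch of how these glob patterns select paths in this commit (fnmatch only approximates Git's own gitattributes matching):

```python
# Illustrative sketch only: approximate the new .gitattributes patterns with
# fnmatch. Git's real gitattributes matching has extra rules (e.g. around "/"
# and "**"), so this is not an exact reimplementation.
from fnmatch import fnmatch

new_lfs_patterns = ["embeds/model*", "llama/onnx_*", "llama/model*"]

example_paths = [
    "embeds/model.onnx",
    "embeds/model.embed_tokens.weight",
    "llama/model.layers.0.input_layernorm.weight",
    "clip/model.onnx",  # stored via LFS too, but matched by a pre-existing rule outside this hunk
]

for path in example_paths:
    matched = [p for p in new_lfs_patterns if fnmatch(path, p)]
    print(path, "->", matched or "no new pattern")
```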
clip/model.onnx
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f8e8ce301573e21c1745188dacfe2068055bb6fb8f7815bf0562da057ea58845
+size 607873982
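Each ADDED file in this commit, starting with clip/model.onnx above, is checked in as a Git LFS pointer: the three +version/+oid/+size lines are the entire tracked content, and the actual binary lives in LFS storage keyed by the SHA-256 oid. A minimal, hypothetical parsing sketch for pointers of exactly this shape (the real handling is done by git-lfs itself):

```python
# Hypothetical helper, for illustration only: parse a Git LFS pointer file of
# the shape shown in these diffs (version / oid sha256:<hex> / size <bytes>).
def parse_lfs_pointer(text: str) -> dict:
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    algo, _, digest = fields["oid"].partition(":")
    return {
        "version": fields["version"],
        "hash_algo": algo,           # "sha256"
        "oid": digest,               # hex digest identifying the blob in LFS storage
        "size": int(fields["size"]), # size of the real file in bytes
    }

pointer_text = """version https://git-lfs.github.com/spec/v1
oid sha256:f8e8ce301573e21c1745188dacfe2068055bb6fb8f7815bf0562da057ea58845
size 607873982
"""
info = parse_lfs_pointer(pointer_text)
print(info["oid"], info["size"])  # clip/model.onnx is ~608 MB in LFS storage
```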
embeds/EtGlowExecutionProvider_GLOW_graph_Extracted_from_-main_graph-_4608082067069852140_0_0_0.onnx
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:edfe71a2058b303f2c1e289affa262b348b755d97a7f537af1a456bdd710386a
+size 5319

embeds/EtGlowExecutionProvider_GLOW_graph_Extracted_from_-main_graph-_9154130920003627498_0_0_0.onnx
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f804b4ee67671050faff9082d9844ac06d0fbb633118637b197043846a19def4
+size 5319

embeds/model.embed_tokens.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f1fd78a0bcd10e159d3182e74e16ea3af59fb0aae3f84e6c1ebdcbcbc215c2f4
+size 262668288

embeds/model.onnx
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:20f4e176e3f76af2ad50a2b071e849e9040587a5a8d883c348abb9a21c94a4c9
+size 4991
llama/EtGlowExecutionProvider_GLOW_graph_Extracted_from_-Extracted_from_-Extracted_from_-Extracted_from_-Extracted_from_-main_graph-----_3732785077968901946_0_0_0.onnx
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:250800c7f4dc659f8179a69242800775d2d984b7b42af42a0a8ac0382f5e766f
+size 35517740

llama/model.embed_tokens.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f1fd78a0bcd10e159d3182e74e16ea3af59fb0aae3f84e6c1ebdcbcbc215c2f4
+size 262668288
llama/model.layers.0.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:788fcd266b3f14f0ea1e8c76439a25ed5ea37a2edc24607dea033c4271dbe865
+size 8192

llama/model.layers.0.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2a65a6ded090e8c60c1b3c08cae689cdd4500154ec2e66c1853ef08360fdf5cf
+size 8192

llama/model.layers.1.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2b2a8ba1a0e66c3b22c5246984c946e93a3dfd683b836149e4d525a47003b9dc
+size 8192

llama/model.layers.1.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9839680938200e6acb3f1f39f267e648e51775086a6845df1bf9aa306f6d7dc1
+size 8192

llama/model.layers.10.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:df9081f04f4771290d3b645cfcbda72d553335f6e49a82dbea0f043246346ec5
+size 8192

llama/model.layers.10.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ac07b78cd252bc238ee5609eb41a26b56bb449a2bea52bf941285d7729e8fb9c
+size 8192

llama/model.layers.11.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:76f2ef2e25c19e496f98cd0c1d159c8fb555b7bfaa3ca4d4ee3370b3f084524c
+size 8192

llama/model.layers.11.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d78ec7cec700e2543718df33a40f3e049b5cdabc2532dc19ad976113783b07e3
+size 8192
llama/model.layers.12.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:29e57aee7ac84fe851430122865b135abe0ff170a63910f0b709a1d9b585c580
+size 8192

llama/model.layers.12.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ed110e987d52f86e9f86ec3be17bff065461ad7d6cfaabff08fbb1855fb63554
+size 8192

llama/model.layers.13.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5067dc834dbd7e9700707284501ace232b20fd40c170a72117320ceb83271b36
+size 8192

llama/model.layers.13.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:674db5fa93c87299c2efed493b635cc284d394d615f1f901d57e8108cc095bb5
+size 8192

llama/model.layers.14.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b330fa44596cb56070ff08cc8fa1c479c719adfa362c65835f4a60106d1486a0
+size 8192

llama/model.layers.14.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3d4ca8e1775b8b7bbbfa032edd61bdc391ad8c62dc892ebbe955c474ac8bf63d
+size 8192

llama/model.layers.15.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e7558fba0fe1de68b82fd00a1327cb4c2eb01d281a3a06c0912c1407fad29a95
+size 8192

llama/model.layers.15.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9f3b741283231ac4cb57a17bc9384ef9ade29eeb974683d519c9a012ad14410f
+size 8192
llama/model.layers.16.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:849f320d334130a962ca3e92b5a3802b95679de096f2d06f03ee792b77c58d90
+size 8192

llama/model.layers.16.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f4de0f94f75520943831bd8e7d8ac85937ea8c194b0b2941cf373805ea48997a
+size 8192

llama/model.layers.17.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f39c5f222d279b89f0d3700ca79e001ca057e06e2ee200aebdc10c81334bd1d7
+size 8192

llama/model.layers.17.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:45aa6c7959cab88038fc5d32882effdcab4454d6e43038dc40a69a3fac7370a2
+size 8192

llama/model.layers.18.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:38d1e99a81344cd53d3d3e020df605933fcebcbee82509624c3281a92d260d8c
+size 8192

llama/model.layers.18.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:59317bee6997b085a939d6a0b224d02afc840c5c103c7c0839c6a85931328b1a
+size 8192

llama/model.layers.19.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4524979d35e4fec1bef71b0418e9fbd10caf9d20a7d6b103f33cc26a981a5841
+size 8192

llama/model.layers.19.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:98f698d7eab2afad2e82ddaa79e76a6871d6bff78581dd59bb31df43e82df329
+size 8192
llama/model.layers.2.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:010ca1057e5d8907c96495a8f0f3ce41d6217df5bf3a6f68ac25016ca3937880
+size 8192

llama/model.layers.2.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f2e7da11f71a58d5edff66c17746ef67dafea5c2724842b8c72c496788d94be3
+size 8192

llama/model.layers.20.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ea7cd04e82ee8d9b27917d54b389229987a13fa6708f4abbfa54e173cb4810ed
+size 8192

llama/model.layers.20.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bf1cf1639d6992512474a9ba286cf5a9cb55805e02c26eefcf94ab662594b6a9
+size 8192

llama/model.layers.21.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:12f52f2011cc62f1088b3b3a5e7d83ff5b03d94ca583b7ecc4505383e145b6db
+size 8192

llama/model.layers.21.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fdd175febe2061c061e3074b0229f0ae247b80cd523960b142e3b261b3cd095c
+size 8192

llama/model.layers.22.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:232d2792145468906e3a5180dfd8613bb4a3431f2b128d6ff5082276695bcc84
+size 8192

llama/model.layers.22.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bfc7dfd90a8d47bee608576150b991bbb66bd195d839377f6cb8d2f645b2ee6e
+size 8192
llama/model.layers.23.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7774420e9efbcd1f78febb9ed9cea346a357c03a453b905ba75145247609fcbd
+size 8192

llama/model.layers.23.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:76cf45638d09a9dc656fd806a1ecbac1642e4d57d13bc6dc7b0b3ff0517f5386
+size 8192

llama/model.layers.24.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7d788a13c941debacd42496f63a40700dea780928b1d65c2b198f4350ef85b86
+size 8192

llama/model.layers.24.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:27a217a518381a139d5ff46be5b960accf640a9541ad265a064b3d2150181cd1
+size 8192

llama/model.layers.25.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a22732e09bea808e263c3794150d5f8da52cbf2ffbb3eb3c86c4c3f46275f10a
+size 8192

llama/model.layers.25.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e5516b77348403e8dbf6c5d603f2464fc288a9887c709aeb976d16b19ffc05e6
+size 8192

llama/model.layers.26.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6d2a0b1299b0ecf7762e40319a0411c9a8cfcc0f86d4b036703150a15a595171
+size 8192

llama/model.layers.26.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:07e0b1917db8bb5df5aa0e92935aa75deedc5fe2dcc5365c38533b931a62beee
+size 8192

llama/model.layers.27.input_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9342aa92ee8b5dffb126d980f8629327f0ecfcee7a4f789210915ae5179621fe
+size 8192

llama/model.layers.27.post_attention_layernorm.weight
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4ce8ab7ba0d049e6a7f6dbf0e972aad5da50150fb6731ab264bf2133427baaf0
+size 8192