Diogo-V committed
Commit 10c0df0 · verified · 1 Parent(s): 70a8767

Adding more results

This view is limited to 50 files because the commit contains too many changes. See the raw diff for the full change set.
Files changed (50)
  1. params/tinyllama/8/rnd_affine_scale/comb_16/init/lm_head/_0.pt +3 -0
  2. params/tinyllama/8/rnd_affine_scale/comb_16/init/lm_head/_s.pt +3 -0
  3. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.mlp.down_proj/_0.pt +3 -0
  4. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.mlp.down_proj/_s.pt +3 -0
  5. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.mlp.gate_proj/_0.pt +3 -0
  6. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.mlp.gate_proj/_s.pt +3 -0
  7. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.mlp.up_proj/_0.pt +3 -0
  8. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.mlp.up_proj/_s.pt +3 -0
  9. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.self_attn.k_proj/_0.pt +3 -0
  10. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.self_attn.k_proj/_s.pt +3 -0
  11. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.self_attn.o_proj/_0.pt +3 -0
  12. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.self_attn.o_proj/_s.pt +3 -0
  13. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.self_attn.q_proj/_0.pt +3 -0
  14. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.self_attn.q_proj/_s.pt +3 -0
  15. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.self_attn.v_proj/_0.pt +3 -0
  16. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.self_attn.v_proj/_s.pt +3 -0
  17. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.mlp.down_proj/_0.pt +3 -0
  18. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.mlp.down_proj/_s.pt +3 -0
  19. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.mlp.gate_proj/_0.pt +3 -0
  20. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.mlp.gate_proj/_s.pt +3 -0
  21. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.mlp.up_proj/_0.pt +3 -0
  22. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.mlp.up_proj/_s.pt +3 -0
  23. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.self_attn.k_proj/_0.pt +3 -0
  24. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.self_attn.k_proj/_s.pt +3 -0
  25. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.self_attn.o_proj/_0.pt +3 -0
  26. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.self_attn.o_proj/_s.pt +3 -0
  27. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.self_attn.q_proj/_0.pt +3 -0
  28. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.self_attn.q_proj/_s.pt +3 -0
  29. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.self_attn.v_proj/_0.pt +3 -0
  30. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.self_attn.v_proj/_s.pt +3 -0
  31. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.mlp.down_proj/_0.pt +3 -0
  32. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.mlp.down_proj/_s.pt +3 -0
  33. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.mlp.gate_proj/_0.pt +3 -0
  34. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.mlp.gate_proj/_s.pt +3 -0
  35. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.mlp.up_proj/_0.pt +3 -0
  36. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.mlp.up_proj/_s.pt +3 -0
  37. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.self_attn.k_proj/_0.pt +3 -0
  38. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.self_attn.k_proj/_s.pt +3 -0
  39. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.self_attn.o_proj/_0.pt +3 -0
  40. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.self_attn.o_proj/_s.pt +3 -0
  41. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.self_attn.q_proj/_0.pt +3 -0
  42. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.self_attn.q_proj/_s.pt +3 -0
  43. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.self_attn.v_proj/_0.pt +3 -0
  44. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.self_attn.v_proj/_s.pt +3 -0
  45. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.11.mlp.down_proj/_0.pt +3 -0
  46. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.11.mlp.down_proj/_s.pt +3 -0
  47. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.11.mlp.gate_proj/_0.pt +3 -0
  48. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.11.mlp.gate_proj/_s.pt +3 -0
  49. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.11.mlp.up_proj/_0.pt +3 -0
  50. params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.11.mlp.up_proj/_s.pt +3 -0
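Each file added below is a Git LFS pointer (version, oid, size) to a small PyTorch tensor; the tensor data itself lives in LFS storage. A minimal sketch of loading one module's pair of files, assuming the LFS objects have been fetched and that _0.pt / _s.pt hold the per-module offset and scale tensors of the rnd_affine_scale setup (an assumption based on the path names, not something this diff confirms):

    # Assumes the LFS payloads have been fetched first, e.g.
    #   git lfs pull --include "params/tinyllama/8/rnd_affine_scale/comb_16/init/**"
    import torch

    base = "params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.mlp.down_proj"

    # _0.pt / _s.pt are assumed to be the offset and scale tensors; only the
    # PyTorch .pt serialization is certain from the file extensions.
    offset = torch.load(f"{base}/_0.pt", map_location="cpu")
    scale = torch.load(f"{base}/_s.pt", map_location="cpu")

    print(type(offset), getattr(offset, "shape", None))
    print(type(scale), getattr(scale, "shape", None))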
params/tinyllama/8/rnd_affine_scale/comb_16/init/lm_head/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c0153c0ed311dd7310034e19d5d971ef2fc670ee51fecfd0667eaaeb6be3d4ec
+ size 129155
params/tinyllama/8/rnd_affine_scale/comb_16/init/lm_head/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0952266daf9e20c1f3e87342678318fd1f188cc9ea2c3b39bdb5a824bbf68db8
+ size 129155
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.mlp.down_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:47166df0a6e93a2157132f81798fa475f049f4764b9d34343aacd6782a34689d
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:963a0a181fcfb65aac385791bac01266b0531a4a446de49f760ad961c0e8e74e
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.mlp.gate_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dfe4e2bd92da223e6e1ba86557a35e46342d30cf1343464e0ef212f6142cb7e6
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2cd5a50cd34d587db668f6a324ecbadd94cee9d52fda91f14834503da3e41c92
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.mlp.up_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:045c31c5cebbf18c9220326096bdbb77bd2a33ee4af43cec097f10c7b7dd20cc
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7ff09259a06402c7795de8293d9663290a3a59adf04cff799ffe1106f5512785
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.self_attn.k_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ac33eda9abd8ed022947e65c5e2be30ffba32a91f51562bb59f59b698d31ab8b
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b37fed097a1d61fce5ce72e45cfd4ac38f29d79644f03584449c3f62ac34243a
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.self_attn.o_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:936e6f6017946c0ca3529d865ccc525a852a7be1ecd4c87422bdd22acf4b6d87
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dc8f42e35001803dc12d130cc6fdf21a09c7627fd2de7943e97e4abedb0f6cf9
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.self_attn.q_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7769c57b3074b996ff40a0b5e4c62cb64703fcfea891e04a0a5d5eca28853424
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eb36ec1c1dace91001a0cf40bc696ba32b31f0f48ed4eb54fb523c465b4d1c11
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.self_attn.v_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:169565e2e13b5f2a9f7d55d535ba9ed733bb8aa5703665b369721dfdfa3c741a
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.0.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5f44f311a5b87f95f6bdf5f253c71c3d74a8b51c564a1671ee508d9bb63d4919
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.mlp.down_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5638e584cc6797642849e4dbd666cb490b6bb3fdebc76c561541fc1022714b68
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0908025ee30948c706234a7f9b93e4730cf280ebd0687f491906864cde9ab12f
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.mlp.gate_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c501e6b9ad0c1a5f3d516262a203e5065320462ca522682e2494266eb0127b17
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0127ba4562d73e9ab8fb2b57e95772a2b7d20eb12dec3c6ec25561e89bd05404
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.mlp.up_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d9454507cef68030b8318f14b1771e70499769ed93e304389c4188540b116b46
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:57de5053f59b8d383f9316b769b8ad31e25b08ff17400c5053620db2adf4f993
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.self_attn.k_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fbef47b75815abe71e688fb696a43cf215009c469a1c1a6ef03f7b387b910770
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:34bbfa21264b131c71e11f4f6937bb244d861ded4f9fe5d01f6a2d5eed3f375d
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.self_attn.o_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dada1bfc7eafe2fb5f5eef332678f5e9f442e33d2700bc4a86367f3cd8bc582f
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6731d71fec8bf22619559f1f448aa666e6f8043973512195d0cb823108d44ae8
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.self_attn.q_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d8d776dd56a92fb398aa19bea2170bdbb4d670c4c91846a2cbcfeeda570a1a85
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:622d779e81dea2f3db2f038bc209b69f4f9bef4fd0346c7f9a6a6bb26a1ba060
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.self_attn.v_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:81a618f999563b562385eeb96310f95d2ce1f8f212989688ac85709e9b92cc57
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.1.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7d2ffb93a6c963c627ebe5599d1f9aea05c26fd11086873003017fbc20d1bc21
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.mlp.down_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:adf4245a6b998d49c262c2668a048a340e2b4082abca157dd10174ea11dcd0c5
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a0944bb1818bbc3fdc8fb472efeebbca2a52baf8654b919838089fb3393edeab
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.mlp.gate_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:04ee099586b751a73bcc9b9c113b51cd417ed470f8afe02250abeb996e145f38
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:08aff9d6716467c4743e0c71de79a4591c2a6b3f524bfe32bd8b6cf315a5df4c
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.mlp.up_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ccdc708a1cd5bb8148574b023a02752790410982a14bf358cffe969333de210d
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:55e40391fc07d0bc7c278c96a26c899e6de9e7c445cca7ef052211b5507d2812
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.self_attn.k_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1494100de9f57473b9ecb68e4ac653597352b4a8c1ee61bb725cd5242543b911
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bff96ea234c5fc7ea77f367aa9e6bc399710491e4b8954a2fbc89d6ef76d3a45
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.self_attn.o_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c8f4cfd45a043a83e1538718ffa9a97988e39bb3863e9509caa5672f02c19395
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:acffef650aa92d4f86279542550286291d4ac5997381c6b0fc91d397eb5eb723
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.self_attn.q_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ebf95acd22252fa8f7b910f951a2e15983a02ebf827f43b18c673cad2c0e78cf
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4876537ade46d7fa32974a9b6e840b8447f469638c074dd4ebeaadcedb9bc72f
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.self_attn.v_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bfea75e6c1a97c8d2b47c1f2deea7a8ac59c336f56505057bdd71f875925f76d
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.10.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1ee46d4cd84e1c63ce8637826fc15ab842c79a4d8ad166ab55a246a7c5084975
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.11.mlp.down_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bd190eecec3abf2ca644b16375c5beb4910d7ebef14265452a1915451e8d2ad3
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.11.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d263a0892acdea251c04b16a9128009fecb54d81129095fef08f1a1527698952
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.11.mlp.gate_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0c6a3f30d307ed5a063a7d6be415bffa6622ba7272cc22fe42468e79de423f16
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.11.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:35dd1aafc00588db54f1119ac13bfab2a076ffe806a43181607406da6d03499f
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.11.mlp.up_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9cf733ef6a014bda0b1f207eadf11f24c263c7104760bd102fcd57e70aaceaf1
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_16/init/model.layers.11.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a17d782921ee9d7597cc50ecb80faa3fe7c90c255779d2c410f8215247404b4b
+ size 23683