Diogo-V committed on
Commit 0b795a7 · verified
1 Parent(s): 00b2965

Adding more results

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. params/tinyllama/8/rnd_affine_scale/comb_12/init/lm_head/_0.pt +3 -0
  2. params/tinyllama/8/rnd_affine_scale/comb_12/init/lm_head/_s.pt +3 -0
  3. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.mlp.down_proj/_0.pt +3 -0
  4. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.mlp.down_proj/_s.pt +3 -0
  5. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.mlp.gate_proj/_0.pt +3 -0
  6. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.mlp.gate_proj/_s.pt +3 -0
  7. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.mlp.up_proj/_0.pt +3 -0
  8. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.mlp.up_proj/_s.pt +3 -0
  9. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.self_attn.k_proj/_0.pt +3 -0
  10. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.self_attn.k_proj/_s.pt +3 -0
  11. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.self_attn.o_proj/_0.pt +3 -0
  12. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.self_attn.o_proj/_s.pt +3 -0
  13. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.self_attn.q_proj/_0.pt +3 -0
  14. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.self_attn.q_proj/_s.pt +3 -0
  15. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.self_attn.v_proj/_0.pt +3 -0
  16. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.self_attn.v_proj/_s.pt +3 -0
  17. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.mlp.down_proj/_0.pt +3 -0
  18. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.mlp.down_proj/_s.pt +3 -0
  19. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.mlp.gate_proj/_0.pt +3 -0
  20. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.mlp.gate_proj/_s.pt +3 -0
  21. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.mlp.up_proj/_0.pt +3 -0
  22. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.mlp.up_proj/_s.pt +3 -0
  23. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.self_attn.k_proj/_0.pt +3 -0
  24. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.self_attn.k_proj/_s.pt +3 -0
  25. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.self_attn.o_proj/_0.pt +3 -0
  26. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.self_attn.o_proj/_s.pt +3 -0
  27. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.self_attn.q_proj/_0.pt +3 -0
  28. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.self_attn.q_proj/_s.pt +3 -0
  29. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.self_attn.v_proj/_0.pt +3 -0
  30. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.self_attn.v_proj/_s.pt +3 -0
  31. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.mlp.down_proj/_0.pt +3 -0
  32. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.mlp.down_proj/_s.pt +3 -0
  33. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.mlp.gate_proj/_0.pt +3 -0
  34. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.mlp.gate_proj/_s.pt +3 -0
  35. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.mlp.up_proj/_0.pt +3 -0
  36. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.mlp.up_proj/_s.pt +3 -0
  37. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.self_attn.k_proj/_0.pt +3 -0
  38. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.self_attn.k_proj/_s.pt +3 -0
  39. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.self_attn.o_proj/_0.pt +3 -0
  40. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.self_attn.o_proj/_s.pt +3 -0
  41. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.self_attn.q_proj/_0.pt +3 -0
  42. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.self_attn.q_proj/_s.pt +3 -0
  43. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.self_attn.v_proj/_0.pt +3 -0
  44. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.self_attn.v_proj/_s.pt +3 -0
  45. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.11.mlp.down_proj/_0.pt +3 -0
  46. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.11.mlp.down_proj/_s.pt +3 -0
  47. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.11.mlp.gate_proj/_0.pt +3 -0
  48. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.11.mlp.gate_proj/_s.pt +3 -0
  49. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.11.mlp.up_proj/_0.pt +3 -0
  50. params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.11.mlp.up_proj/_s.pt +3 -0
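Each of the diffs below adds a three-line Git LFS pointer file rather than the tensor payload itself: the pointer records the LFS spec version, a SHA-256 object id, and the payload size, while the actual data lives in LFS storage. As a minimal sketch only, here is how one of the added parameter files could be inspected, assuming the repository has been fetched with `git lfs pull` so the pointer resolves to a real file, and assuming each `.pt` file deserializes to a plain PyTorch tensor (the path is taken verbatim from the list above; nothing else about the contents is implied by this commit):

```python
# Minimal sketch: inspect one of the added parameter files after `git lfs pull`
# has replaced the three-line pointer with the real payload. Assumes the file
# was written with torch.save() on a plain tensor; adjust if it holds a dict.
import torch

# Path copied from the "Files changed" list above (hypothetical usage example).
path = "params/tinyllama/8/rnd_affine_scale/comb_12/init/lm_head/_0.pt"

obj = torch.load(path, map_location="cpu")
print(type(obj))
if hasattr(obj, "shape"):
    print(obj.shape, obj.dtype)
```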
params/tinyllama/8/rnd_affine_scale/comb_12/init/lm_head/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f8394ff4b2e986b0124c927bcc439228f6034386a24ae0988b7feabfff9b3593
+ size 129155
params/tinyllama/8/rnd_affine_scale/comb_12/init/lm_head/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7aea2de00f7d85e6b5539802265b4f1ffe76a3f3b130ccdac23273110709cc7a
+ size 129155
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.mlp.down_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cc51fbb6110ef09aace0037aa1aba295314a1ad39332c80e01de22b7f6c31bb7
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:70ea6cd0efb52288ab4408fa7057d73b2b84dd42d77028839255c0c206cacedf
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.mlp.gate_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:009267b9a0dc693ceaa7b8e9348eaa79f66928fd68f1c005e04fc04b6901308d
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2ca4f37c3a6fdb1e4e96fa79fa437d9abde253d60230819f7261de9c24a00f0a
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.mlp.up_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:27b1263cce7e8db77c1dd0199810c484625f78a691cea606246af67093bc300b
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d37a6e1ca9bb89274c567356ec9fdd532c68dc3017553748d2efe4d3c351d0d6
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.self_attn.k_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b94b053d8d87695b2c58c72d433ff0b6e821c04b7c4f60a5636e0ceba24a8bf3
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:973b9b2a6da4101bb3ca9cc34e489d43ab2648af27beb527e0a35a788b11f9a1
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.self_attn.o_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9bcd8e1706c139a40b7501bc76323c5af7b70cc64074c890430310ecb39edbf1
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2249ce09006f348cbf2f5405d5bd31d158b6cbcff3ef865aa2ca0a08f42e32ff
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.self_attn.q_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8201fba1a494694254d432e55362f7e2003f6c95d47c543e0fba98eb8b751a80
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e841a2b384a9d91a8e302e0fdc8c5477616f2d745c41bae2760ea4e8221b632a
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.self_attn.v_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a86b6c24bc7dbc22136e7aa10112cdc09acdf576e8919ada350eec9e724248c8
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.0.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3046a76062207731ec052b041d0beadb0ecc28d2c70de6c6526bac2162d1442f
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.mlp.down_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ba2e24ebe3a93560e6a725e96eb5ebc370094638485a311aa42aa857481cef0f
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b1bbcd8d2797bc5081e74a3df7f5f39d8d9836c64e006fce3aa36423d2167e33
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.mlp.gate_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c61d9b91995790a6970725cf594d23b2b5e63a01bd55633be61af72b780042bc
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ddb7ba4c07135ed509fd3b106f191a5799b18deec4730ed4bced054988c00009
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.mlp.up_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d1a4920c84b7fca93fd2031a24fffd4949ae255b1c2a0abadcc8752c62328859
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6a4e191cd595bf6def5a41fcf8f6ba8521670973a933a9298b5f35394e8434fd
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.self_attn.k_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d7275226c36fb72ea354a5ba5d12c4c6562229cdc4c526e261adbd0b6426cec8
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:45f04216c8fe0e6db010bed096621fb1dfe60cd9037bc7f0fffe147ec85de069
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.self_attn.o_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b341b663f5ec036a833d013cfaa721a7ee7301bdbb014b786dfa85c093dbf778
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:053010e3b44ae5f6923da3253b457f9c5985c38c42fea7cfadbb42de9e54093c
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.self_attn.q_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6e5a0ce615175cf88ab739f73e3e1ccb03c450407d8dc4883aa840a4d0e3fda9
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fd19a9be6a8905c4e594a4206e3dd3766ba0693551d10dfed075ab905ddda56d
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.self_attn.v_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8fc3069311577b5163a201f0914cb62eddd63a310f9517eafa749f32bc46ab3a
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.1.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ea40ddfecf98284253b9cbd13de9e2c5fa9f8ca5288cb72ed7c616772b88b634
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.mlp.down_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:450ccb116a9ecd77feb3cbe7b61ea11f2f9c6e17afc5bcba40731c04830ba8be
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:89384d658ac5881f3ed9971234429d35b5b86c35a61155ae0e4501c00e7c3061
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.mlp.gate_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c085d3d4972c101e800fec47a3fdc2dda334126cca4c950f1567e83f46a9e845
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e813fa62b8b466bf18a53976be3d3857cb691302fe5563356348bb44f9f7fb94
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.mlp.up_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aa14db735a60ec46e702e4e7600f63d790bd21c72a2fe0237daaaefc88273687
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aad15e6c11753556a6f49d21ac76625f602ce4a554580dc231e2749dd34af1cb
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.self_attn.k_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:595cbfd7ff202a0527bcaac811e3a0a38261558875ed29c5fca7a52ec9636c2a
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:08c1b3cb48d89df311a88485a26faf8047bf88126860b93bec0de5d3456bb3cc
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.self_attn.o_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:89b81519c6e42219903c5cfd57ffb7ba86fbb3c6109172ab838fbbf32efc4297
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7e14465e710869b4a046654ad8ec53fc85b3dfab4461a95532e3a2ee17f22b8a
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.self_attn.q_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:61532306f6bff6cf641532283f689c5b5f3f0e062f807e9ccf6ce1fcef4f2dc2
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f0ebfa26f92dac63b5930fc96d120b6f10588ed9164de93e38d9f6577ca85ae5
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.self_attn.v_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:76bb906094e59bb1188e51587b3c6cdbbd54d1f8e795eaf8a4da16c8d907d83a
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.10.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7f0ce7fdb58678f3cdaf3e71215a307917bc7a9ae9755d7677f985d2b25cd681
+ size 2179
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.11.mlp.down_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ff06dc26d31c2f9419b7dbb8931616f0524f132cb53d2f78b0e804c3dede0130
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.11.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4484d468eec0a2a3db68eb598d3d7cb43a4aa7ec6f609bdbbf7e131b0c5ccb7e
+ size 9347
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.11.mlp.gate_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c4474fdebe72bec1f7dd0bbba6758363b86053152e66e3db166486edc73029a1
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.11.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2b4f73fb80832edcb9461ddb6ad0b93db1df52d37373b0c7be83fae694bbc154
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.11.mlp.up_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:79a53a211582d6a52eec8ddb45af181ef5f80a0d6a8e473e3cdabf8631d68bee
+ size 23683
params/tinyllama/8/rnd_affine_scale/comb_12/init/model.layers.11.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cf58cc8e67ac217d5ae1e66e071109f285e64e47ffda35f057520f6233aab5b3
+ size 23683