{
  "metadata": {
    "total_size": 7822800192
  },
  "weight_map": {
    "freqs_cis": "model-00001-of-00002.safetensors",
    "lm_head.weight": "model-00002-of-00002.safetensors",
    "transformer.adapter.weight": "model-00001-of-00002.safetensors",
    "transformer.coda.0.attn.Wqkv.weight": "model-00002-of-00002.safetensors",
    "transformer.coda.0.attn.proj.weight": "model-00002-of-00002.safetensors",
    "transformer.coda.0.attn.qk_bias": "model-00002-of-00002.safetensors",
    "transformer.coda.0.mlp.fc.weight": "model-00002-of-00002.safetensors",
    "transformer.coda.0.mlp.proj.weight": "model-00002-of-00002.safetensors",
    "transformer.coda.0.norm_1.weight": "model-00002-of-00002.safetensors",
    "transformer.coda.0.norm_2.weight": "model-00002-of-00002.safetensors",
    "transformer.coda.0.norm_3.weight": "model-00002-of-00002.safetensors",
    "transformer.coda.0.norm_4.weight": "model-00002-of-00002.safetensors",
    "transformer.coda.1.attn.Wqkv.weight": "model-00002-of-00002.safetensors",
    "transformer.coda.1.attn.proj.weight": "model-00002-of-00002.safetensors",
    "transformer.coda.1.attn.qk_bias": "model-00002-of-00002.safetensors",
    "transformer.coda.1.mlp.fc.weight": "model-00002-of-00002.safetensors",
    "transformer.coda.1.mlp.proj.weight": "model-00002-of-00002.safetensors",
    "transformer.coda.1.norm_1.weight": "model-00002-of-00002.safetensors",
    "transformer.coda.1.norm_2.weight": "model-00002-of-00002.safetensors",
    "transformer.coda.1.norm_3.weight": "model-00002-of-00002.safetensors",
    "transformer.coda.1.norm_4.weight": "model-00002-of-00002.safetensors",
    "transformer.core_block.0.attn.Wqkv.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.0.attn.proj.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.0.attn.qk_bias": "model-00001-of-00002.safetensors",
    "transformer.core_block.0.mlp.fc.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.0.mlp.proj.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.0.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.0.norm_2.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.0.norm_3.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.0.norm_4.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.1.attn.Wqkv.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.1.attn.proj.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.1.attn.qk_bias": "model-00001-of-00002.safetensors",
    "transformer.core_block.1.mlp.fc.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.1.mlp.proj.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.1.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.1.norm_2.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.1.norm_3.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.1.norm_4.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.2.attn.Wqkv.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.2.attn.proj.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.2.attn.qk_bias": "model-00001-of-00002.safetensors",
    "transformer.core_block.2.mlp.fc.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.2.mlp.proj.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.2.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.2.norm_2.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.2.norm_3.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.2.norm_4.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.3.attn.Wqkv.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.3.attn.proj.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.3.attn.qk_bias": "model-00001-of-00002.safetensors",
    "transformer.core_block.3.mlp.fc.weight": "model-00002-of-00002.safetensors",
    "transformer.core_block.3.mlp.proj.weight": "model-00002-of-00002.safetensors",
    "transformer.core_block.3.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.3.norm_2.weight": "model-00001-of-00002.safetensors",
    "transformer.core_block.3.norm_3.weight": "model-00002-of-00002.safetensors",
    "transformer.core_block.3.norm_4.weight": "model-00002-of-00002.safetensors",
    "transformer.ln_f.weight": "model-00002-of-00002.safetensors",
    "transformer.prelude.0.attn.Wqkv.weight": "model-00001-of-00002.safetensors",
    "transformer.prelude.0.attn.proj.weight": "model-00001-of-00002.safetensors",
    "transformer.prelude.0.attn.qk_bias": "model-00001-of-00002.safetensors",
    "transformer.prelude.0.mlp.fc.weight": "model-00001-of-00002.safetensors",
    "transformer.prelude.0.mlp.proj.weight": "model-00001-of-00002.safetensors",
    "transformer.prelude.0.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.prelude.0.norm_2.weight": "model-00001-of-00002.safetensors",
    "transformer.prelude.0.norm_3.weight": "model-00001-of-00002.safetensors",
    "transformer.prelude.0.norm_4.weight": "model-00001-of-00002.safetensors",
    "transformer.prelude.1.attn.Wqkv.weight": "model-00001-of-00002.safetensors",
    "transformer.prelude.1.attn.proj.weight": "model-00001-of-00002.safetensors",
    "transformer.prelude.1.attn.qk_bias": "model-00001-of-00002.safetensors",
    "transformer.prelude.1.mlp.fc.weight": "model-00001-of-00002.safetensors",
    "transformer.prelude.1.mlp.proj.weight": "model-00001-of-00002.safetensors",
    "transformer.prelude.1.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.prelude.1.norm_2.weight": "model-00001-of-00002.safetensors",
    "transformer.prelude.1.norm_3.weight": "model-00001-of-00002.safetensors",
    "transformer.prelude.1.norm_4.weight": "model-00001-of-00002.safetensors",
    "transformer.wte.weight": "model-00001-of-00002.safetensors"
  }
}