Commit a550e0b (verified) · committed by m-i · 1 parent: 6d3bd1a

Add files using upload-large-folder tool

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,41 @@
+ ---
+ license: mit
+ license_link: https://huggingface.co/rednote-hilab/dots.llm1.inst/blob/main/LICENSE
+ pipeline_tag: text-generation
+ base_model: rednote-hilab/dots.llm1.inst
+ tags:
+ - chat
+ - mlx
+ library_name: mlx
+ language:
+ - en
+ - zh
+ ---
+
+ # mlx-community/dots.llm1.inst-mixed-4-6bit
+
+ This model [mlx-community/dots.llm1.inst-mixed-4-6bit](https://huggingface.co/mlx-community/dots.llm1.inst-mixed-4-6bit) was
+ converted to MLX format from [rednote-hilab/dots.llm1.inst](https://huggingface.co/rednote-hilab/dots.llm1.inst)
+ using mlx-lm version **0.25.3**.
+
+ ## Use with mlx
+
+ ```bash
+ pip install mlx-lm
+ ```
+
+ ```python
+ from mlx_lm import load, generate
+
+ model, tokenizer = load("mlx-community/dots.llm1.inst-mixed-4-6bit")
+
+ prompt = "hello"
+
+ if tokenizer.chat_template is not None:
+     messages = [{"role": "user", "content": prompt}]
+     prompt = tokenizer.apply_chat_template(
+         messages, add_generation_prompt=True
+     )
+
+ response = generate(model, tokenizer, prompt=prompt, verbose=True)
+ ```
added_tokens.json ADDED
@@ -0,0 +1,18 @@
+ {
+   "<|endofexecution|>": 151655,
+   "<|endofobservation|>": 151653,
+   "<|endofresponse|>": 151649,
+   "<|endofsystem|>": 151651,
+   "<|endoftext|>": 151643,
+   "<|endofuserprompt|>": 151647,
+   "<|execution|>": 151654,
+   "<|im_end|>": 151645,
+   "<|im_start|>": 151644,
+   "<|observation|>": 151652,
+   "<|reject-unknown|>": 151656,
+   "<|response|>": 151648,
+   "<|sec-cot|>": 151657,
+   "<|sec-end-cot|>": 151658,
+   "<|system|>": 151650,
+   "<|userprompt|>": 151646
+ }
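
These IDs extend the base Qwen2 vocabulary. A minimal sketch (not part of this commit) for checking that the uploaded tokenizer resolves these special tokens to the IDs listed above; it assumes the `transformers` library is installed and that `AutoTokenizer` can load this repo (it downloads the tokenizer files on first use):

```python
# Sketch only: verify a few of the special-token IDs from added_tokens.json.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("mlx-community/dots.llm1.inst-mixed-4-6bit")

# Expected mappings copied from added_tokens.json above.
expected = {
    "<|im_start|>": 151644,
    "<|im_end|>": 151645,
    "<|endofresponse|>": 151649,
}
for token, token_id in expected.items():
    assert tok.convert_tokens_to_ids(token) == token_id, token
```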
config.json ADDED
The diff for this file is too large to render. See raw diff
 
generation_config.json ADDED
@@ -0,0 +1,12 @@
+ {
+   "_from_model_config": true,
+   "do_sample": true,
+   "temperature": 0.7,
+   "top_p": 0.8,
+   "bos_token_id": 151643,
+   "eos_token_id": [
+     151643,
+     151649
+   ],
+   "transformers_version": "4.46.3"
+ }
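
This file records the upstream sampling defaults (temperature 0.7, top-p 0.8, two end-of-sequence IDs). As far as I can tell, mlx-lm does not pick these values up from generation_config.json automatically, so a hedged sketch that applies them explicitly is shown below; it assumes the `make_sampler` helper and the `sampler` argument of `generate` available in recent mlx-lm releases:

```python
# Sketch only: reproduce the generation_config.json sampling defaults
# (temperature=0.7, top_p=0.8) when generating with mlx-lm.
from mlx_lm import load, generate
from mlx_lm.sample_utils import make_sampler  # assumed helper in recent mlx-lm

model, tokenizer = load("mlx-community/dots.llm1.inst-mixed-4-6bit")
sampler = make_sampler(temp=0.7, top_p=0.8)

prompt = tokenizer.apply_chat_template(
    [{"role": "user", "content": "hello"}], add_generation_prompt=True
)
response = generate(model, tokenizer, prompt=prompt, sampler=sampler, verbose=True)
```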
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:76e8d751d5d0418f184950f53665ab568c9d6b60e52affda5fbf18113f2751c6
+ size 5007970758
model-00002-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fac4ae963cd41f7ffc5285cd91ff7a9816af2163b6b42a0c9e2de458032b21e1
+ size 5313134128
model-00003-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d604596e5f5ba79563e6339984e9112866ee5e3900c66a47573ee00bad82a595
+ size 5361454626
model-00004-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:549705ef00e16e31ae87f8cf23f29fe1e15e0bc4a3508ba11a960bcaf4261abd
+ size 5042687795
model-00005-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e61c3f1302c16c28a1e05236d56a95788c884c811ae62f9da387fdaf007c1517
+ size 5179788805
model-00006-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f871583a1bf60c8d908b76bc8254e0d8cb41a73b4f83110836cc441579135a9c
+ size 5363336429
model-00007-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5995f7281fadcd3d98a57908651f1926df6f292bfab218dbb20ca547d017e652
+ size 5347428844
model-00008-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f8be20dcd5ac22f1aa5f297413a981a776ef23b9739e2511b2560dfce0fd04dd
+ size 5042687769
model-00009-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:996902c52336302d19125c44afb05b27dc3bbddb4e20e8e34ed438878fd39ab6
+ size 5179788905
model-00010-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b6f8903e378342389960852853e8531cb2b5ddaff4f697fb4ac25f8fb4f0ab36
+ size 5361454716
model-00011-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8c3c846ef300fe94651a3a94fa49181741463fd1cb5e1621d5098cf7da3434aa
+ size 5042687765
model-00012-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f34532b987aaf0b63d645c8fe6c5a2d65bc2bd78c15dfcfb924082cb33d7b16f
+ size 4934074034
model-00013-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:413dbfd60efa1f72d10a80feb7209c01e8b24bfb607bebc140938b47fcde3187
+ size 5179788849
model-00014-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ff854054ea54460f2e501e6bf9867ec038489c0df6e23c62ac3eca785568f84a
+ size 5361454738
model-00015-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f37fe9088c856a722f941b1ed76ce2b0f5c99e271a5f2fa7850bfcb1a2cbfeb4
+ size 5368120074
model-00016-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7aef719278ef6d78415e934b031f3c7122a26155c8642cfe3b2e8944d59be51a
+ size 5352082038
model-00017-of-00017.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0e7e7da6f7282ec4df4b4866d59f7242ff86c9415000f3eefc36772bdacbff38
+ size 2647965319
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
 
special_tokens_map.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>",
+     "<|userprompt|>",
+     "<|endofuserprompt|>",
+     "<|response|>",
+     "<|endofresponse|>",
+     "<|system|>",
+     "<|endofsystem|>",
+     "<|observation|>",
+     "<|endofobservation|>",
+     "<|execution|>",
+     "<|endofexecution|>",
+     "<|reject-unknown|>",
+     "<|sec-cot|>",
+     "<|sec-end-cot|>"
+   ],
+   "eos_token": {
+     "content": "<|endofresponse|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<|endofresponse|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fe750f7b1f42fb8f760ee7ee91fa5fb3974795b3dc14c269819b83a086f5e98d
+ size 11420764
tokenizer_config.json ADDED
@@ -0,0 +1,161 @@
+ {
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "151643": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151644": {
+       "content": "<|im_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151645": {
+       "content": "<|im_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151646": {
+       "content": "<|userprompt|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151647": {
+       "content": "<|endofuserprompt|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151648": {
+       "content": "<|response|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151649": {
+       "content": "<|endofresponse|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151650": {
+       "content": "<|system|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151651": {
+       "content": "<|endofsystem|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151652": {
+       "content": "<|observation|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151653": {
+       "content": "<|endofobservation|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151654": {
+       "content": "<|execution|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151655": {
+       "content": "<|endofexecution|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151656": {
+       "content": "<|reject-unknown|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151657": {
+       "content": "<|sec-cot|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151658": {
+       "content": "<|sec-end-cot|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>",
+     "<|userprompt|>",
+     "<|endofuserprompt|>",
+     "<|response|>",
+     "<|endofresponse|>",
+     "<|system|>",
+     "<|endofsystem|>",
+     "<|observation|>",
+     "<|endofobservation|>",
+     "<|execution|>",
+     "<|endofexecution|>",
+     "<|reject-unknown|>",
+     "<|sec-cot|>",
+     "<|sec-end-cot|>"
+   ],
+   "bos_token": null,
+   "chat_template": "{% if messages[0]['role'] == 'system' %}<|system|>{{ messages[0]['content'] }}<|endofsystem|>{% set start_idx = 1 %}{% else %}<|system|>You are a helpful assistant.<|endofsystem|>{% set start_idx = 0 %}{% endif %}{% for idx in range(start_idx, messages|length) %}{% if messages[idx]['role'] == 'user' %}<|userprompt|>{{ messages[idx]['content'] }}<|endofuserprompt|>{% elif messages[idx]['role'] == 'assistant' %}<|response|>{{ messages[idx]['content'] }}<|endofresponse|>{% endif %}{% endfor %}{% if add_generation_prompt and messages[-1]['role'] == 'user' %}<|response|>{% endif %}",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|endofresponse|>",
+   "errors": "replace",
+   "extra_special_tokens": {},
+   "model_max_length": 131072,
+   "pad_token": "<|endofresponse|>",
+   "split_special_tokens": false,
+   "tokenizer_class": "Qwen2Tokenizer",
+   "unk_token": null
+ }
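
The `chat_template` above fully defines the prompt format: a `<|system|>...<|endofsystem|>` block (with a default system prompt when none is supplied), `<|userprompt|>...<|endofuserprompt|>` for user turns, `<|response|>...<|endofresponse|>` for assistant turns, and a trailing `<|response|>` when a generation prompt is requested. A small sketch (not part of this commit, assuming `jinja2` is installed) that renders the template to inspect the resulting prompt string:

```python
# Sketch only: render the chat_template from tokenizer_config.json with plain
# Jinja2. The template string is copied verbatim from the file above.
from jinja2 import Template

CHAT_TEMPLATE = (
    "{% if messages[0]['role'] == 'system' %}<|system|>{{ messages[0]['content'] }}"
    "<|endofsystem|>{% set start_idx = 1 %}{% else %}<|system|>You are a helpful "
    "assistant.<|endofsystem|>{% set start_idx = 0 %}{% endif %}"
    "{% for idx in range(start_idx, messages|length) %}"
    "{% if messages[idx]['role'] == 'user' %}<|userprompt|>{{ messages[idx]['content'] }}<|endofuserprompt|>"
    "{% elif messages[idx]['role'] == 'assistant' %}<|response|>{{ messages[idx]['content'] }}<|endofresponse|>"
    "{% endif %}{% endfor %}"
    "{% if add_generation_prompt and messages[-1]['role'] == 'user' %}<|response|>{% endif %}"
)

messages = [{"role": "user", "content": "hello"}]
prompt = Template(CHAT_TEMPLATE).render(messages=messages, add_generation_prompt=True)
print(prompt)
# <|system|>You are a helpful assistant.<|endofsystem|><|userprompt|>hello<|endofuserprompt|><|response|>
```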
vocab.json ADDED
The diff for this file is too large to render. See raw diff