nm-autobot committed
Commit 10aaa0d (verified) · 1 parent: 0042b2e

Upload folder using huggingface_hub

chat_template.jinja ADDED
@@ -0,0 +1,15 @@
+ {% for message in messages %}
+ {% if message['role'] == 'user' %}
+ {{ '<|user|>
+ ' + message['content'] + eos_token }}
+ {% elif message['role'] == 'system' %}
+ {{ '<|system|>
+ ' + message['content'] + eos_token }}
+ {% elif message['role'] == 'assistant' %}
+ {{ '<|assistant|>
+ ' + message['content'] + eos_token }}
+ {% endif %}
+ {% if loop.last and add_generation_prompt %}
+ {{ '<|assistant|>' }}
+ {% endif %}
+ {% endfor %}
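
The added file is a Jinja chat template: each message is wrapped in a <|user|>, <|system|>, or <|assistant|> header and terminated with the tokenizer's eos_token, and a bare <|assistant|> header is appended after the last message when add_generation_prompt is set. A minimal rendering sketch, assuming jinja2 is installed and "</s>" as eos_token (taken from tokenizer_config.json below); transformers' apply_chat_template performs the equivalent rendering with the tokenizer's own special tokens:

from jinja2 import Environment

# Template text exactly as added in chat_template.jinja above.
CHAT_TEMPLATE = (
    "{% for message in messages %}\n"
    "{% if message['role'] == 'user' %}\n"
    "{{ '<|user|>\n' + message['content'] + eos_token }}\n"
    "{% elif message['role'] == 'system' %}\n"
    "{{ '<|system|>\n' + message['content'] + eos_token }}\n"
    "{% elif message['role'] == 'assistant' %}\n"
    "{{ '<|assistant|>\n' + message['content'] + eos_token }}\n"
    "{% endif %}\n"
    "{% if loop.last and add_generation_prompt %}\n"
    "{{ '<|assistant|>' }}\n"
    "{% endif %}\n"
    "{% endfor %}"
)

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

# trim_blocks/lstrip_blocks keep the newlines after {% ... %} tags out of the
# prompt, mirroring how transformers renders chat templates.
env = Environment(trim_blocks=True, lstrip_blocks=True)
prompt = env.from_string(CHAT_TEMPLATE).render(
    messages=messages,
    eos_token="</s>",            # from tokenizer_config.json below
    add_generation_prompt=True,  # append the trailing <|assistant|> header
)
print(prompt)
# <|system|>
# You are a helpful assistant.</s>
# <|user|>
# Hello!</s>
# <|assistant|>
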
config.json CHANGED
@@ -77,7 +77,7 @@
  "rope_theta": 10000.0,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
- "transformers_version": "4.51.3",
+ "transformers_version": "4.52.1",
  "use_cache": true,
  "vocab_size": 32000
 }
generation_config.json CHANGED
@@ -3,5 +3,5 @@
  "eos_token_id": 2,
  "max_length": 2048,
  "pad_token_id": 0,
- "transformers_version": "4.51.3"
+ "transformers_version": "4.52.1"
 }
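
Both config.json and generation_config.json only bump the recorded transformers_version from 4.51.3 to 4.52.1. A small sanity-check sketch; the file names assume a local checkout of this repo, and the comparison uses the packaging library:

import json

import transformers
from packaging import version

# Paths assume the repo files have been downloaded to the current directory.
for path in ("config.json", "generation_config.json"):
    with open(path) as f:
        recorded = json.load(f)["transformers_version"]  # "4.52.1" after this commit
    installed = transformers.__version__
    if version.parse(installed) < version.parse(recorded):
        print(f"{path}: saved with transformers {recorded}, but {installed} is installed")
    else:
        print(f"{path}: ok ({recorded} <= {installed})")
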
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0fb56c95c0c9fbc13e1bb14aadd6de4e3e0ca6a8e7244f801545d56b565f1a87
+ oid sha256:1b869d1fd183520bc3ca2fb7f1ee6c488550e335e428253bc1b6cc1bf043cde8
  size 868745640
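
model.safetensors is stored through Git LFS, so the diff only touches the pointer file: the oid (the SHA-256 of the actual weights blob) changes while the size stays at 868745640 bytes. A sketch for verifying a downloaded copy against the new pointer, assuming model.safetensors is the resolved weights file rather than the pointer itself:

import hashlib

EXPECTED_OID = "1b869d1fd183520bc3ca2fb7f1ee6c488550e335e428253bc1b6cc1bf043cde8"
EXPECTED_SIZE = 868_745_640

sha256 = hashlib.sha256()
size = 0
with open("model.safetensors", "rb") as f:            # resolved LFS file, not the pointer
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha256.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, f"unexpected size: {size}"
assert sha256.hexdigest() == EXPECTED_OID, "sha256 does not match the LFS oid"
print("model.safetensors matches the updated LFS pointer")
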
tokenizer_config.json CHANGED
@@ -29,7 +29,6 @@
29
  }
30
  },
31
  "bos_token": "<s>",
32
- "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n' + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
33
  "clean_up_tokenization_spaces": false,
34
  "eos_token": "</s>",
35
  "extra_special_tokens": {},
 
29
  }
30
  },
31
  "bos_token": "<s>",
 
32
  "clean_up_tokenization_spaces": false,
33
  "eos_token": "</s>",
34
  "extra_special_tokens": {},