liamcripwell committed
Commit 895efda · verified · 1 Parent(s): b558add

Update chat template

Files changed (1)
  1. tokenizer_config.json +2 -2
tokenizer_config.json CHANGED

@@ -169,7 +169,7 @@
   ]
 },
 "bos_token": "<s>",
- "chat_template": "{{ bos_token }}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+ "chat_template": "{{ bos_token }}{% for message in messages %}\n {%- if message['role'] == 'user' and template -%}\n {{- '<|im_start|>' + message['role'] -}}\n {{ '\n# Template:' }}\n {{- '\n' + template }}\n {% if examples %}\n {{- '# Examples:' }}\n {% for example in examples %}\n {{- '## Input:\n' }}\n {{- example['input'] + '\n' }}\n {{- '## Output:\n' }}\n {{- example['output'] | trim }}\n {% endfor %}\n {%- endif %}\n {{- '# Context:' }}\n {% if message['content'] is string %}\n {{- message['content'] | trim }}\n {% else %}\n {% for content in message['content'] %}\n {%- if content is string %}\n {{- content | trim }}\n {%- elif content['type'] == 'text' %}\n {{- content['text'] | trim }}\n {%- endif %}\n {% endfor %}\n {% endif %}\n {{- '<|im_end|> '}}\n {% else %}\n {{- '<|im_start|>' + message['role'] }}\n {% if message['content'] is string %}\n {{- message['content'] | trim }}\n {% else %}\n {% for content in message['content'] %}\n {%- if content is string %}\n {{- content | trim }}\n {%- elif content['type'] == 'text' %}\n {{- content['text'] | trim }}\n {%- endif %}\n {% endfor %}\n {% endif %}\n {{- '<|im_end|> '}}\n {% endif %}\n{% endfor -%}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant' }}\n{% endif -%}\n",
 "clean_up_tokenization_spaces": false,
 "eos_token": "</s>",
 "extra_special_tokens": {},
@@ -177,4 +177,4 @@
 "pad_token": "</s>",
 "tokenizer_class": "InternLM2Tokenizer",
 "unk_token": "<unk>"
- }
+ }
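Usage note: below is a minimal sketch of how the `template` and `examples` variables introduced by this chat template might be supplied at render time. The repository id and the template/example values are placeholders, not taken from this commit. It relies on the fact that `apply_chat_template` in `transformers` forwards extra keyword arguments into the Jinja rendering context, which is how the new `# Template:` and `# Examples:` sections get populated on user turns.

```python
from transformers import AutoTokenizer

# "REPO_ID" is a placeholder: substitute the model repository this commit
# belongs to. trust_remote_code is needed because InternLM2Tokenizer is a
# custom tokenizer class.
tokenizer = AutoTokenizer.from_pretrained("REPO_ID", trust_remote_code=True)

messages = [
    {"role": "user", "content": "John Smith was born in 1990 in Paris."},
]

# Extra keyword arguments to apply_chat_template are exposed as variables
# inside the Jinja template, so `template` and `examples` below are the
# variables the updated chat_template checks for on user turns.
prompt = tokenizer.apply_chat_template(
    messages,
    # Illustrative extraction template and few-shot pair, not from the commit.
    template='{"name": "", "birth_year": "", "birth_city": ""}',
    examples=[
        {
            "input": "Ada Lovelace was born in London in 1815.",
            "output": '{"name": "Ada Lovelace", "birth_year": "1815", "birth_city": "London"}',
        },
    ],
    add_generation_prompt=True,
    tokenize=False,
)
print(prompt)
```

Because the updated template only emits the `# Template:` / `# Examples:` / `# Context:` sections when a `template` variable is defined, a plain call without the extra kwargs falls through to the else branch and renders ordinary `<|im_start|>`/`<|im_end|>` turns.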