import gradio as gr
from transformers import AutoTokenizer

# Load a tokenizer to render chat templates with; its template is overridden below.
tokenizer = AutoTokenizer.from_pretrained("HuggingFaceH4/zephyr-7b-beta")
# Two demo conversations: one without and one with a system message.
demo_conversation1 = [
    {"role": "user", "content": "Hi there!"},
    {"role": "assistant", "content": "Hello, human!"}
]

demo_conversation2 = [
    {"role": "system", "content": "You are a helpful chatbot."},
    {"role": "user", "content": "Hi there!"}
]
# Default ChatML-style template shown in the editor.
default_template = """{% for message in messages %}
{{ "<|im_start|>" + message["role"] + "\n" + message["content"] + "<|im_end|>\n" }}
{% endfor %}
{% if add_generation_prompt %}
{{ "<|im_start|>assistant\n" }}
{% endif %}"""
conversations = [demo_conversation1, demo_conversation2]

def apply_chat_template(template):
    # Render both demo conversations with the user-supplied template,
    # both with and without the generation prompt appended.
    tokenizer.chat_template = template
    outputs = []
    for i, conversation in enumerate(conversations):
        without_gen = tokenizer.apply_chat_template(conversation, tokenize=False)
        with_gen = tokenizer.apply_chat_template(conversation, tokenize=False, add_generation_prompt=True)
        out = f"Conversation {i} without generation prompt:\n\n{without_gen}\n\nConversation {i} with generation prompt:\n\n{with_gen}\n\n"
        outputs.append(out)
    return tuple(outputs)
iface = gr.Interface(
    fn=apply_chat_template,
    inputs=gr.TextArea(value=default_template, lines=10, max_lines=30, label="Chat Template"),
    outputs=["text"] * len(conversations),
)
iface.launch()
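
For reference, below is a minimal standalone sketch of what a ChatML-style template equivalent to the default one above is expected to render, run outside the Gradio app. The inlined template string and the output shown in comments are illustrative assumptions based on the ChatML format, not output captured from this Space.

# Standalone sketch (assumes the same Zephyr tokenizer and a single-line ChatML-style template).
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("HuggingFaceH4/zephyr-7b-beta")
tokenizer.chat_template = (
    "{% for message in messages %}"
    "{{ '<|im_start|>' + message['role'] + '\\n' + message['content'] + '<|im_end|>\\n' }}"
    "{% endfor %}"
    "{% if add_generation_prompt %}{{ '<|im_start|>assistant\\n' }}{% endif %}"
)

conversation = [
    {"role": "system", "content": "You are a helpful chatbot."},
    {"role": "user", "content": "Hi there!"},
]

print(tokenizer.apply_chat_template(conversation, tokenize=False, add_generation_prompt=True))
# Expected to print something like:
# <|im_start|>system
# You are a helpful chatbot.<|im_end|>
# <|im_start|>user
# Hi there!<|im_end|>
# <|im_start|>assistant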