import gradio as gr
import spaces
import torch
import transformers
model_name = "meta-llama/Meta-Llama-3-8B-Instruct" | |
pipeline = transformers.pipeline( | |
"text-generation", | |
model=model_name, | |
model_kwargs={"torch_dtype": torch.bfloat16}, | |
device="cuda", | |
) | |
# On ZeroGPU hardware the decorator requests a GPU per call; it is a no-op
# on dedicated GPU hardware (and is why `spaces` is imported above)
@spaces.GPU
def chat_function(message, history):
    # Rebuild the full conversation so the model sees prior turns,
    # not just the latest message
    messages = [
        {"role": "system", "content": "You are a helpful assistant!"},
    ]
    for user_msg, assistant_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})
    prompt = pipeline.tokenizer.apply_chat_template(
        messages,
        tokenize=False,
        add_generation_prompt=True,
    )
    # Llama 3 marks end-of-turn with <|eot_id|>, so stop on it as well as EOS
    terminators = [
        pipeline.tokenizer.eos_token_id,
        pipeline.tokenizer.convert_tokens_to_ids("<|eot_id|>"),
    ]
    outputs = pipeline(
        prompt,
        max_new_tokens=256,
        eos_token_id=terminators,
        do_sample=True,
        temperature=0.6,
        top_p=0.9,
    )
    # The pipeline returns prompt + completion; strip the prompt prefix
    return outputs[0]["generated_text"][len(prompt):]
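
# Optional streaming variant: a minimal sketch, not part of the original app.
# gr.ChatInterface streams the reply when the chat function is a generator,
# and transformers' TextIteratorStreamer yields decoded text as tokens are
# produced. The name chat_function_streaming is illustrative; to use it,
# pass it to gr.ChatInterface below in place of chat_function.
from threading import Thread

from transformers import TextIteratorStreamer

@spaces.GPU
def chat_function_streaming(message, history):
    messages = [{"role": "system", "content": "You are a helpful assistant!"}]
    for user_msg, assistant_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})
    prompt = pipeline.tokenizer.apply_chat_template(
        messages, tokenize=False, add_generation_prompt=True
    )
    streamer = TextIteratorStreamer(
        pipeline.tokenizer, skip_prompt=True, skip_special_tokens=True
    )
    # Run generation in a background thread; the streamer feeds the UI
    thread = Thread(
        target=pipeline,
        args=(prompt,),
        kwargs=dict(
            streamer=streamer,
            max_new_tokens=256,
            eos_token_id=[
                pipeline.tokenizer.eos_token_id,
                pipeline.tokenizer.convert_tokens_to_ids("<|eot_id|>"),
            ],
            do_sample=True,
            temperature=0.6,
            top_p=0.9,
        ),
    )
    thread.start()
    partial = ""
    for chunk in streamer:
        partial += chunk
        yield partial  # Gradio redraws the chat bubble on each yield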
# Wire the chat function to a chat UI; the *_btn arguments are Gradio 4.x
# options (they were removed in Gradio 5)
gr.ChatInterface(
    chat_function,
    chatbot=gr.Chatbot(height=300),
    textbox=gr.Textbox(placeholder="Enter message here", container=False, scale=7),
    title="Llama 3 8B Chat",
    description="Ask Llama 3 8B Instruct any question",
    theme="soft",
    retry_btn=None,
    undo_btn="Delete Previous",
    clear_btn="Clear",
).launch()
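
# Deployment note (an assumption, not from the original Space): a Space
# running this file also needs a requirements.txt next to app.py. A minimal
# sketch, with version pins left to whatever the Space actually tested:
#
#   gradio
#   spaces
#   torch
#   transformers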