# Import required libraries
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the model and tokenizer
MODEL_NAME = "SeaLLMs/SeaLLM-7B-v2.5"

# Download model and tokenizer from Hugging Face
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME, torch_dtype="auto", device_map="auto")
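
# Note (assumption about the Space setup): device_map="auto" relies on the
# `accelerate` package, so the Space's requirements.txt should list
# gradio, transformers, torch, and accelerate.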

# Define the chatbot function
def chatbot(user_input):
    # Use the device the model was loaded on instead of hard-coding "cuda"
    inputs = tokenizer(user_input, return_tensors="pt").to(model.device)
    # max_new_tokens caps the reply length; temperature only takes effect with do_sample=True
    outputs = model.generate(**inputs, max_new_tokens=150, do_sample=True, temperature=0.7)
    # Decode only the newly generated tokens, skipping the echoed prompt
    response = tokenizer.decode(outputs[0][inputs["input_ids"].shape[1]:], skip_special_tokens=True)
    return response
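
# Optional sketch, not part of the original app: SeaLLM-7B-v2.5 is an instruction-tuned
# chat model, so replies are usually better when the prompt is wrapped in the model's
# chat template. This assumes the tokenizer ships a chat template (check the model card);
# `chatbot_with_template` is a hypothetical alternative to `chatbot` above.
def chatbot_with_template(user_input):
    messages = [{"role": "user", "content": user_input}]
    input_ids = tokenizer.apply_chat_template(
        messages,
        add_generation_prompt=True,  # append the assistant-turn marker
        return_tensors="pt",
    ).to(model.device)
    outputs = model.generate(input_ids, max_new_tokens=150, do_sample=True, temperature=0.7)
    # Decode only the tokens generated after the prompt
    return tokenizer.decode(outputs[0][input_ids.shape[1]:], skip_special_tokens=True)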

# Create a Gradio interface
interface = gr.Interface(
    fn=chatbot,
    inputs="text",
    outputs="text",
    title="SeaLLM Chatbot",
    description="A chatbot powered by SeaLLM-7B-v2.5.",
    examples=["Hello!", "What's the weather today?", "Tell me a joke!"],
)
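
# Alternative UI sketch (assumes the installed Gradio version provides gr.ChatInterface):
# gr.ChatInterface keeps conversation history and renders a chat-style widget.
# To try it, launch `chat_interface` instead of `interface` below.
chat_interface = gr.ChatInterface(
    fn=lambda message, history: chatbot(message),  # history is ignored in this sketch
    title="SeaLLM Chatbot",
)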

# Launch the interface
if __name__ == "__main__":
    interface.launch()