# Hugging Face Space: DialoGPT-based chatbot demo served with Gradio.
import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the DialoGPT model and tokenizer once at startup (downloads on first run).
model_name = "Astrea/DialoGPT-small-akari"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)


def respond(message: str) -> str:
    """Generate a single chatbot reply for *message*.

    Args:
        message: The raw user text from the Gradio Textbox.

    Returns:
        The decoded model reply (special tokens stripped).
    """
    # Gradio passes the Textbox value as a plain string — the original
    # ``messages["input"]`` indexing raised TypeError on every call.
    # DialoGPT expects the turn to be terminated with the EOS token.
    input_ids = tokenizer.encode(message + tokenizer.eos_token, return_tensors="pt")
    with torch.no_grad():
        output_ids = model.generate(
            input_ids,
            max_length=50,
            num_beams=5,
            no_repeat_ngram_size=2,
            do_sample=True,  # required for top_k/top_p/temperature to take effect
            top_k=50,
            top_p=0.95,
            temperature=0.7,
            pad_token_id=tokenizer.eos_token_id,  # DialoGPT has no pad token
        )
    # ``generate`` returns token ids, not bytes — decode via the tokenizer,
    # skipping the echoed prompt so only the reply is shown.
    reply_ids = output_ids[0][input_ids.shape[-1]:]
    return tokenizer.decode(reply_ids, skip_special_tokens=True)


# Define the Gradio interface: Textbox in, plain string out
# (the outputs=gr.Textbox() component expects a str, not a dict).
akari = gr.Interface(
    fn=respond,
    inputs=gr.Textbox(),
    outputs=gr.Textbox(),
)
akari.launch()