# NOTE(review): the lines below ("Spaces: / Sleeping / Sleeping") were
# Hugging Face Spaces page-status residue from a web scrape, not code;
# converted to this comment so the file parses as Python.
from transformers import pipeline
import gradio as gr

# Load the text-generation pipeline once at module import so the Gradio app
# reuses a single model instance across requests.
try:
    generator = pipeline("text-generation", model="microsoft/DialoGPT-medium")
except Exception as e:
    # Surface the failure (missing weights, no network, OOM, ...) and abort:
    # the app cannot function without the model.
    print(f"Error loading the model: {e}")
    raise
# Generate one assistant reply for the Gradio chat interface.
def dialoGPT_response(user_input, history):
    """Produce an assistant reply and the updated conversation history.

    Args:
        user_input: The user's latest message text.
        history: Prior turns as a list of {"role", "content"} dicts, or
            None on the very first call.

    Returns:
        A ``(reply, new_history)`` tuple. On failure the reply is an error
        notice and the history is returned unchanged.
    """
    try:
        prior = [] if history is None else history
        conversation = prior + [{"role": "user", "content": user_input}]
        # return_full_text=False keeps only the newly generated text.
        outputs = generator(conversation, return_full_text=False, max_length=1000)
        reply = outputs[0]["generated_text"]
        new_history = conversation + [{"role": "assistant", "content": reply}]
        return reply, new_history
    except Exception as e:
        # Best-effort: report the problem in the UI instead of crashing.
        print(f"Error generating response: {e}")
        return "An error occurred while generating a response.", history
# Gradio UI: a message textbox plus hidden conversation state, wired to
# dialoGPT_response, which returns (reply, new_state).
iface = gr.Interface(
    fn=dialoGPT_response,
    inputs=[
        gr.Textbox(placeholder="Enter your message..."),
        "state",  # carries the chat history between calls
    ],
    outputs=[
        "text",
        "state",
    ],
    title="DialoGPT Chat",
    # Fixed: description previously said "DialoGPT-small", but the loaded
    # model is DialoGPT-medium.
    description="Chat with DialoGPT-medium model. Your conversation history is maintained.",
    allow_flagging="never",
)
# Launch the app only when run as a script (not when imported as a module).
# debug=True enables Gradio's error traces in the browser.
if __name__ == "__main__":
    iface.launch(debug=True)