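"""Simple two-backend chatbot demo built with Gradio.

The user can chat either through the OpenAI completions API (by entering their own
API key) or through a locally loaded Hugging Face text-generation model, and can
clear the conversation with the "/clear" command or the dedicated tab.

Version assumptions (not pinned in the original file): the pre-1.0 `openai` SDK,
which still exposes `openai.Completion`, and a Gradio 3.x release that still ships
the legacy `gr.inputs`/`gr.outputs` components as well as `gr.TabbedInterface`.
"""
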
import openai
import transformers
import gradio as gr
# Default OpenAI API key (placeholder); the key entered in the UI is passed per request and overrides it
openai.api_key = "YOUR_API_KEY"

# Define the chat function for OpenAI API
def openai_chat(api_key, model, message):
    # Check that an API key has been provided (the textbox yields an empty string when blank)
    if not api_key:
        return "Please enter your OpenAI API key and try again."
    # Send the prompt to the OpenAI completions endpoint
    response = openai.Completion.create(
        engine=model,
        prompt=message,
        max_tokens=1024,
        n=1,
        stop=None,
        temperature=0.5,
        api_key=api_key,
    )
    # Extract the bot's reply from the API response
    bot_response = response.choices[0].text.strip()
    return bot_response

# Cache loaded pipelines so a model is not re-downloaded on every message
hf_pipelines = {}

# Define the chat function for the Hugging Face models
def hf_chat(model_name, message):
    # Load the text-generation pipeline for the selected model, reusing it if already cached
    if model_name not in hf_pipelines:
        hf_pipelines[model_name] = transformers.pipeline("text-generation", model=model_name)
    model = hf_pipelines[model_name]
    # Generate a response from the model
    bot_response = model(message, max_length=1024, do_sample=True, temperature=0.7)[0]["generated_text"]
    return bot_response

# Define the Gradio input and output components
api_key_input = gr.inputs.Textbox(label="OpenAI API Key", default="")
model_input = gr.inputs.Dropdown(
    label="Select OpenAI model",
    choices=["davinci", "curie", "babbage"],
    default="davinci",
)
hf_model_input = gr.inputs.Dropdown(
    label="Select Hugging Face model",
    choices=["microsoft/DialoGPT-large", "Salesforce/codegen-2B-multi", "microsoft/DialoGPT-small"],
    default="microsoft/DialoGPT-large",
)
mode_input = gr.inputs.Dropdown(
    label="Select chatbot mode",
    choices=["OpenAI", "Hugging Face"],
    default="OpenAI",
)
message_input = gr.inputs.Textbox(label="Enter your message here")
output = gr.outputs.Textbox(label="Bot response")

# Chat history shared across calls, stored as (speaker, text) tuples
chat_window = []

def chatbot(message, mode, model, hf_model, api_key):
    # "/clear" is a special command that wipes the history
    if message == "/clear":
        chat_window.clear()
        return "Chat history cleared."
    if message:
        if mode == "Hugging Face":
            bot_response = hf_chat(hf_model, message)
        else:
            bot_response = openai_chat(api_key, model, message)
        chat_window.append(("User", message))
        chat_window.append(("Bot", bot_response))
    # Render the conversation so far
    return "\n".join([f"{name}: {text}" for name, text in chat_window])

# Define the Gradio interface for chatbot
chat_interface = gr.Interface(
    fn=chatbot,
    inputs=[message_input, mode_input, model_input, hf_model_input, api_key_input],
    outputs=output,
    title="Chatbot",
    description="Enter your message below to chat with an AI",
    theme="compact",
    allow_flagging="never",
)

# Add a clear button to the chat window
def clear_history():
    # Empty the shared chat history and confirm to the user
    chat_window.clear()
    return "Chat history cleared."

clear_button = gr.Interface(
    fn=clear_history,
    inputs=[],
    outputs=gr.outputs.Textbox(label="Status"),
    title="Clear Chat History",
    description="Click to clear the chat history.",
    theme="compact",
    allow_flagging="never",
)

# Combine the chat interface and the clear button into a single tabbed page
page = gr.TabbedInterface(
    [chat_interface, clear_button],
    tab_names=["Chatbot", "Clear Chat History"],
    title="Chatbot",
)

# Launch the page
page.launch()
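
# Usage sketch (assuming this file is saved as app.py and the packages noted in the
# module docstring are installed):
#   python app.py      # Gradio serves the UI locally, on http://127.0.0.1:7860 by default
# For a temporary public link, call page.launch(share=True) instead.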