import re
import openai
import transformers
import gradio as gr
# Define a regular expression to match Python code blocks
code_pattern = re.compile(r"```python\n(.*?)\n```", re.DOTALL)

# Define the chat function for the OpenAI API
def openai_chat(api_key, model, message):
    # Check that an API key has been provided
    if not api_key:
        return "Please enter your OpenAI API key and try again."
    # Extract Python code blocks from the message
    code_blocks = extract_code_blocks(message)
    # Replace code blocks with highlighted versions
    highlighted_message = message
    for code in code_blocks:
        highlighted_code = f'<span style="background-color: #FFFF00;">{code}</span>'
        # Use a plain string replacement so regex metacharacters in the code
        # block cannot break the substitution
        highlighted_message = highlighted_message.replace(f"```python\n{code}\n```", highlighted_code)
    # Set up the OpenAI API request
    response = openai.Completion.create(
        engine=model,
        prompt=highlighted_message,
        max_tokens=1024,
        n=1,
        stop=None,
        temperature=0.5,
        api_key=api_key,
    )
    # Extract the bot's response from the API result
    bot_response = response.choices[0].text.strip()
    # Highlight the same code blocks wherever they appear in the bot response
    highlighted_bot_response = bot_response
    for code in code_blocks:
        highlighted_code = f'<span style="background-color: #FFFF00;">{code}</span>'
        highlighted_bot_response = highlighted_bot_response.replace(code, highlighted_code)
    return highlighted_bot_response
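
# Note: openai.Completion.create above is the pre-1.0 openai-python API.
# With openai>=1.0 the roughly equivalent call (sketch only, adapt as needed) is:
#   client = openai.OpenAI(api_key=api_key)
#   client.completions.create(model=model, prompt=highlighted_message, max_tokens=1024)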

# Define the chat function for Hugging Face models
def hf_chat(model_name, message):
    # Load the model and tokenizer (note: the pipeline is re-created on every
    # call; DialoGPT models are decoder-only, so the text-generation task is used)
    generator = transformers.pipeline("text-generation", model=model_name)
    # Generate a response from the model
    bot_response = generator(message, max_length=1024, do_sample=True, temperature=0.7)[0]["generated_text"]
    return bot_response

# Define a function to extract code blocks from a string
def extract_code_blocks(text):
    code_blocks = []
    for match in code_pattern.finditer(text):
        code_blocks.append(match.group(1))
    return code_blocks
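
# Example (illustrative only):
#   extract_code_blocks("Here is code:\n```python\nprint('hi')\n```")
#   returns ["print('hi')"]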

# Define the Gradio interface components
api_key_input = gr.inputs.Textbox(label="OpenAI API Key", default="")
model_input = gr.inputs.Dropdown(
    label="Select OpenAI model",
    choices=["text-davinci-003", "text-davinci-002", "davinci"],
    default="text-davinci-003",
)
message_input = gr.inputs.Textbox(label="Enter your message here")
output = gr.outputs.HTML(label="Bot response")

openai_chat_button = gr.Interface(
    fn=openai_chat,
    inputs=[api_key_input, model_input, message_input],
    outputs=output,
    title="OpenAI Chatbot",
    description="Enter your message below to chat with an OpenAI model",
    theme="compact",
    layout="vertical",
    allow_flagging=False,
    allow_screenshot=False,
    allow_share=False,
)
hf_chat_models = ["microsoft/DialoGPT-large", "microsoft/DialoGPT-medium", "microsoft/DialoGPT-small"]
hf_model_input = gr.inputs.Dropdown(
    label="Select Hugging Face model",
    choices=hf_chat_models,
    default=hf_chat_models[0],
)

hf_chat_button = gr.Interface(
    fn=hf_chat,
    inputs=[hf_model_input, message_input],
    outputs=output,
    title="Hugging Face Chatbot",
    description="Enter your message below to chat with a Hugging Face model",
    theme="compact",
    layout="vertical",
    allow_flagging=False,
    allow_screenshot=False,
    allow_share=False,
)
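
# The combined interface below needs a dispatch function, which this file does
# not otherwise define. This is a minimal sketch (the name `chat` is introduced
# here): it assumes the OpenAI backend should be used whenever an API key is
# supplied, and the selected Hugging Face model otherwise.
def chat(message, model, hf_model, api_key):
    if api_key:
        return openai_chat(api_key, model, message)
    return hf_chat(hf_model, message)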

chat_button = gr.Interface(
    fn=chat,
    inputs=[message_input, model_input, hf_model_input, api_key_input],
    outputs=output,
    title="Chatbot",
    description="Enter your message below to chat with an AI",
    theme="compact",
    allow_flagging=False,
    allow_screenshot=False,
    allow_share=False,
    examples=[
        ["Hello, how are you?", "text-davinci-003", hf_chat_models[0], ""],
        ["What's the weather like today?", "text-davinci-003", hf_chat_models[0], ""],
        ["Can you help me with some Python code?\n```python\nfor i in range(10):\n    print(i)\n```",
         "text-davinci-003", hf_chat_models[0], ""],
    ],
    live=False,
)

chat_button.launch()  # Launch the Gradio interface
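
# Optional sketch: the three interfaces above could also be served together in
# a single app. This assumes a Gradio version that provides TabbedInterface;
# uncomment and use instead of the launch() call above if desired.
# demo = gr.TabbedInterface(
#     [openai_chat_button, hf_chat_button, chat_button],
#     tab_names=["OpenAI", "Hugging Face", "Combined"],
# )
# demo.launch()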