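"""Gradio chatbot demo with two interchangeable backends.

Wraps the legacy (pre-1.0) OpenAI Completion API and Hugging Face
``transformers`` text-generation pipelines behind simple Gradio interfaces,
plus a combined interface that routes each message to one of the two.

Assumes older library versions that match the APIs used below:
``openai<1.0`` and a Gradio release that still exposes ``gr.inputs`` /
``gr.outputs``.
"""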
import openai
import transformers
import gradio as gr

# Default OpenAI API key (placeholder; the key entered in the UI below overrides it per request)
openai.api_key = "YOUR_API_KEY"

# Define the chat function for OpenAI API
def openai_chat(api_key, model, message):
    # Check that an API key was provided (Gradio passes an empty string when the field is blank)
    if not api_key:
        return "Please enter your OpenAI API key and try again."

    # Send the request to the legacy Completion endpoint (openai<1.0)
    response = openai.Completion.create(
        engine=model,
        prompt=message,
        max_tokens=1024,
        n=1,
        stop=None,
        temperature=0.5,
        api_key=api_key,
    )
    
    # Extract the bot's response from the API request
    bot_response = response.choices[0].text.strip()
    
    return bot_response

# Cache Hugging Face pipelines so each model is only downloaded and loaded once
hf_pipelines = {}

# Define the chat function for Hugging Face API
def hf_chat(model_name, message):
    # Load the model and tokenizer (DialoGPT is a causal LM, so use the text-generation pipeline)
    if model_name not in hf_pipelines:
        hf_pipelines[model_name] = transformers.pipeline("text-generation", model=model_name)
    model = hf_pipelines[model_name]

    # Generate a response and strip the echoed prompt from the generated text
    generated = model(message, max_length=1024, do_sample=True, temperature=0.7)[0]["generated_text"]
    bot_response = generated[len(message):].strip()

    return bot_response
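
# Example of calling the backend directly, outside the Gradio UI (the model name
# is illustrative; any causal-LM checkpoint from the Hub should work here):
#   print(hf_chat("microsoft/DialoGPT-small", "Hello there!"))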

# Define the Gradio interface for OpenAI chatbot
api_key_input = gr.inputs.Textbox(label="OpenAI API Key", default="")
model_input = gr.inputs.Dropdown(
    label="Select OpenAI model",
    choices=["text-davinci-003", "text-davinci-002"],
    default="text-davinci-003",
)
message_input = gr.inputs.Textbox(label="Enter your message here")
output = gr.outputs.Textbox(label="Bot response")

openai_chat_button = gr.Interface(
    fn=openai_chat,
    inputs=[api_key_input, model_input, message_input],
    outputs=output,
    title="OpenAI Chatbot",
    description="Enter your message below to chat with an OpenAI AI",
    theme="compact",
    layout="vertical",
    allow_flagging=False,
    allow_screenshot=False,
    allow_share=False,
)

# Define the Gradio interface for Hugging Face chatbot
hf_model_input = gr.inputs.Dropdown(
    label="Select Hugging Face model",
    choices=["microsoft/DialoGPT-large", "microsoft/DialoGPT-medium", "microsoft/DialoGPT-small"],
    default="microsoft/DialoGPT-large",
)

hf_chat_button = gr.Interface(
    fn=hf_chat,
    inputs=[hf_model_input, message_input],
    outputs=output,
    title="Hugging Face Chatbot",
    description="Enter your message below to chat with a Hugging Face AI",
    theme="compact",
    layout="vertical",
    allow_flagging=False,
    allow_screenshot=False,
    allow_share=False,
)

# Combined interface: routes to the Hugging Face backend when a model is selected,
# otherwise falls back to the OpenAI backend
chat_button = gr.Interface(
    fn=lambda message, model, hf_model, api_key: hf_chat(hf_model, message) if hf_model else openai_chat(api_key, model, message),
    inputs=[message_input, model_input, hf_model_input, api_key_input],
    outputs=output,
    title="Chatbot",
    description="Enter your message below to chat with an AI",
    theme="compact",
    allow_flagging=False,
    allow_screenshot=False,
    examples=[
        ["Hello, how are you?", "text-davinci-003", "microsoft/DialoGPT-large", ""],
        ["What's the weather like today?", "text-davinci-003", "microsoft/DialoGPT-medium", ""],
        ["Can you help me with some Python code?", "text-davinci-003", "microsoft/DialoGPT-small", ""],
    ],
    live=False,
)

chat_button.launch()  # Launch the Gradio interface
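
# When running in a notebook or a restricted environment, a temporary public
# link can be requested instead with: chat_button.launch(share=True)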