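"""Gradio demo: chat with either the OpenAI completions API or a Hugging Face DialoGPT
model, highlighting any Python code blocks in the exchanged messages as HTML."""
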
import re
import openai
import transformers
import gradio as gr

# Define a regular expression to match Python code blocks
code_pattern = re.compile(r"```python\n(.*?)\n```", re.DOTALL)

# Define the chat function for OpenAI API
def openai_chat(api_key, model, message):
    # Check that an API key was provided (Gradio passes an empty string when the box is left blank)
    if not api_key:
        return "Please enter your OpenAI API key and try again."
    
    # Extract code blocks from the message
    code_blocks = extract_code_blocks(message)
    
    # Replace each code block with a highlighted version. Plain string replacement
    # avoids re.sub treating the extracted code as a regular-expression pattern.
    highlighted_message = message
    for code in code_blocks:
        highlighted_code = f'<span style="background-color: #FFFF00;">{code}</span>'
        highlighted_message = highlighted_message.replace(f"```python\n{code}\n```", highlighted_code)
    
    # Send the prompt to the OpenAI completions endpoint (legacy openai<1.0 interface)
    response = openai.Completion.create(
        engine=model,
        prompt=highlighted_message,
        max_tokens=1024,
        n=1,
        stop=None,
        temperature=0.5,
        api_key=api_key,
    )
    
    # Extract the bot's response from the API request
    bot_response = response.choices[0].text.strip()
    
    # Highlight the same code snippets wherever they reappear in the bot response,
    # again using plain string replacement rather than an unescaped regex
    highlighted_bot_response = bot_response
    for code in code_blocks:
        highlighted_code = f'<span style="background-color: #FFFF00;">{code}</span>'
        highlighted_bot_response = highlighted_bot_response.replace(code, highlighted_code)
    
    return highlighted_bot_response

# Define the chat function for Hugging Face API
def hf_chat(model_name, message):
    # Load the model and tokenizer. The DialoGPT checkpoints are decoder-only GPT-2
    # models, so they need the "text-generation" pipeline rather than
    # "text2text-generation" (which expects an encoder-decoder model).
    generator = transformers.pipeline("text-generation", model=model_name)
    
    # Generate a response from the model
    bot_response = generator(message, max_length=1024, do_sample=True, temperature=0.7)[0]["generated_text"]
    
    return bot_response
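
# A possible optimization (not part of the original script): hf_chat rebuilds the
# pipeline on every call, so each message pays the full model-loading cost. A small
# module-level cache like this hypothetical helper could be used instead, e.g. by
# replacing the pipeline call in hf_chat with get_hf_pipeline(model_name).
_hf_pipelines = {}

def get_hf_pipeline(model_name):
    # Load each Hugging Face model at most once and reuse it across messages
    if model_name not in _hf_pipelines:
        _hf_pipelines[model_name] = transformers.pipeline("text-generation", model=model_name)
    return _hf_pipelines[model_name]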

# Define a function to extract code blocks from a string
def extract_code_blocks(text):
    code_blocks = []
    for match in code_pattern.finditer(text):
        code_blocks.append(match.group(1))
    return code_blocks
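
# Illustration (not executed): for a message such as
#   "Here you go:\n```python\nprint('hi')\n```"
# extract_code_blocks returns ["print('hi')"], i.e. just the code between the fences.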

# Define the Gradio interface
api_key_input = gr.inputs.Textbox(label="OpenAI API Key", default="")
model_input = gr.inputs.Dropdown(
    label="Select OpenAI model",
    choices=["davinci", "text-davinci-002", "text-davinci-003"],
    default="text-davinci-003",
)
message_input = gr.inputs.Textbox(label="Enter your message here")
output = gr.outputs.HTML(label="Bot response")

openai_chat_button = gr.Interface(
    fn=openai_chat,
    inputs=[api_key_input, model_input, message_input],
    outputs=output,
    title="OpenAI Chatbot",
    description="Enter your message below to chat with an OpenAI AI",
    theme="compact",
    layout="vertical",
    allow_flagging=False,
    allow_screenshot=False,
)

hf_chat_models = ["microsoft/DialoGPT-large", "microsoft/DialoGPT-medium", "microsoft/DialoGPT-small"]
hf_model_input = gr.inputs.Dropdown(
    label="Select Hugging Face model",
    choices=hf_chat_models,
    default=hf_chat_models[0],
)
hf_chat_button = gr.Interface(
    fn=hf_chat,
    inputs=[hf_model_input, message_input],
    outputs=output,
    title="Hugging Face Chatbot",
    description="Enter your message below to chat with a Hugging Face AI",
    theme="compact",
    layout="vertical",
    allow_flagging=False,
    allow_screenshot=False,
)

# Route a message to OpenAI when an API key is supplied, otherwise to the selected Hugging Face model
def chat(message, openai_model, hf_model, api_key):
    if api_key:
        return openai_chat(api_key, openai_model, message)
    return hf_chat(hf_model, message)

chat_button = gr.Interface(
    fn=chat,
    inputs=[message_input, model_input, hf_model_input, api_key_input],
    outputs=output,
    title="Chatbot",
    description="Enter your message below to chat with an AI",
    theme="compact",
    allow_flagging=False,
    allow_screenshot=False,
    examples=[
        ["Hello, how are you?", "text-davinci-003", hf_chat_models[0], ""],
        ["What's the weather like today?", "text-davinci-003", hf_chat_models[0], ""],
        ["Can you help me with some Python code?\n```python\nfor i in range(10):\n    print(i)\n```", "text-davinci-003", hf_chat_models[0], ""],
    ],
    live=False,
)

chat_button.launch()  # Launch the combined Gradio interface
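
# Note: launch() serves the app locally; passing share=True (chat_button.launch(share=True))
# would also create a temporary public Gradio link if one is needed.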