Spaces: Runtime error
Eric Michael Martinez committed · Commit e84431c · Parent(s): d8a4145

update

Files changed:
- app.py (+30 -20)
- requirements.txt (+1 -0)
app.py CHANGED
@@ -1,18 +1,33 @@
 import gradio as gr
 import openai
 import examples as chatbot_examples
+from dotenv import load_dotenv
+import os
 
-def save_settings(openai_api_base, openai_api_key):
-    openai.api_base=openai_api_base
-    openai.api_key=openai_api_key
+load_dotenv() # take environment variables from .env.
 
+# In order to authenticate, secrets must have been set, and the user supplied credentials match
+def auth(username, password):
+    app_username = os.getenv("APP_USERNAME")
+    app_password = os.getenv("APP_PASSWORD")
+
+    if app_username and app_password:
+        return username == app_username and password == app_password
+
+    return False
+
 # Define a function to get the AI's reply using the OpenAI API
-def get_ai_reply(model, system_message, message, history_state):
-    # Initialize the messages list
-    messages = [
+def get_ai_reply(message, model="gpt-3.5-turbo", system_message=None, temperature=0, message_history=[]):
+    # Initialize the messages list
+    messages = []
 
-    # Add the
-
+    # Add the system message to the messages list
+    if system_message is not None:
+        messages += [{"role": "system", "content": system_message}]
+
+    # Add the message history to the messages list
+    if message_history is not None:
+        messages += message_history
 
     # Add the user's message to the messages list
     messages += [{"role": "user", "content": message}]
@@ -20,11 +35,12 @@ def get_ai_reply(model, system_message, message, history_state):
     # Make an API call to the OpenAI ChatCompletion endpoint with the model and messages
     completion = openai.ChatCompletion.create(
         model=model,
-        messages=messages
+        messages=messages,
+        temperature=temperature
     )
 
     # Extract and return the AI's response from the API response
-    return completion.choices[0].message.content
+    return completion.choices[0].message.content.strip()
 
 # Define a function to handle the chat interaction with the AI model
 def chat(model, system_message, message, chatbot_messages, history_state):
@@ -34,11 +50,10 @@ def chat(model, system_message, message, chatbot_messages, history_state):
 
     # Try to get the AI's reply using the get_ai_reply function
    try:
-        ai_reply = get_ai_reply(model, system_message, message, history_state)
+        ai_reply = get_ai_reply(message, model=model, system_message=system_message, message_history=history_state)
     except Exception as e:
-        # If an error occurs,
-
-        return None, chatbot_messages, history_state
+        # If an error occurs, raise a Gradio error
+        raise gr.Error(e)
 
     # Append the user's message and the AI's reply to the chatbot_messages list
     chatbot_messages.append((message, ai_reply))
@@ -99,14 +114,9 @@ def get_chatbot_app(additional_examples=[]):
             example_load_btn.click(choose_example, inputs=[example_dropdown], outputs=[system_message, message, chatbot, history_state])
             # Connect the send button to the chat function
             btn.click(chat, inputs=[model_selector, system_message, message, chatbot, history_state], outputs=[message, chatbot, history_state])
-        with gr.Tab("Settings"):
-            openai_api_base = gr.Textbox(label="OpenAI API Base", value=openai.api_base)
-            openai_api_key = gr.Textbox(label="OpenAI API Key", type="password", value=openai.api_key)
-            save_settings_btn = gr.Button(value="Save")
-            save_settings_btn.click(save_settings, inputs=[openai_api_base, openai_api_key])
     # Return the app
     return app
 
 # Call the launch_chatbot function to start the chatbot interface using Gradio
 # Set the share parameter to False, meaning the interface will not be publicly accessible
-get_chatbot_app().launch()
+get_chatbot_app().launch(auth=auth)
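For orientation, here is a minimal sketch of calling the reworked get_ai_reply on its own, mirroring the keyword arguments that chat() now passes. The import path app, the prompt strings, and the printed usage are illustrative assumptions, not part of the commit; it also assumes OPENAI_API_KEY is already exported in the environment so the openai client can pick it up, since the Settings tab and save_settings() that used to set openai.api_key were removed.

# Sketch only: assumes app.py is importable as `app` and OPENAI_API_KEY is set
# in the environment before the openai client is used.
from app import get_ai_reply

reply = get_ai_reply(
    "Hello, who are you?",                     # user message (illustrative)
    model="gpt-3.5-turbo",                     # default in the new signature
    system_message="You are a helpful bot.",   # optional; prepended as a "system" message
    temperature=0,                             # default in the new signature
    message_history=[],                        # prior turns as role/content dicts
)
print(reply)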
requirements.txt CHANGED
@@ -1,2 +1,3 @@
 gradio == 3.11.0
 openai == 0.27.4
+python-dotenv == 1.0.0
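The new python-dotenv pin backs the load_dotenv() call above: locally, credentials can live in a .env file, while on a Hugging Face Space the same names would be set as secrets. A sketch of such a file follows, with placeholder values only; the diff itself reads APP_USERNAME and APP_PASSWORD, and OPENAI_API_KEY is an assumption about how the openai client gets its key.

# .env (keep this file out of version control)
# Credentials checked by auth():
APP_USERNAME=your-username
APP_PASSWORD=your-password
# Assumption, not shown in this diff: read by the openai client from the environment
OPENAI_API_KEY=sk-your-key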