Spaces:
Runtime error
Runtime error
add function infra
Browse files- app.py +76 -21
- functions.py +48 -0
- requirements.txt +2 -1
app.py
CHANGED
|
@@ -1,45 +1,100 @@
|
|
| 1 |
import streamlit as st
|
| 2 |
import os
|
| 3 |
from openai import AzureOpenAI
|
| 4 |
-
|
|
|
|
| 5 |
|
| 6 |
st.title("Support Chat UI")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 7 |
|
| 8 |
client = AzureOpenAI(
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
)
|
| 13 |
|
| 14 |
if "openai_model" not in st.session_state:
|
| 15 |
st.session_state["openai_model"] = "gpt-35-turbo"
|
| 16 |
|
| 17 |
if "messages" not in st.session_state:
|
| 18 |
-
st.session_state.messages = [
|
|
|
|
|
|
|
| 19 |
|
| 20 |
for message in st.session_state.messages:
|
| 21 |
-
|
| 22 |
-
st.
|
|
|
|
| 23 |
|
| 24 |
-
if prompt := st.chat_input("
|
| 25 |
st.session_state.messages.append({"role": "user", "content": prompt})
|
| 26 |
with st.chat_message("user"):
|
| 27 |
st.markdown(prompt)
|
| 28 |
|
| 29 |
-
with st.chat_message("assistant"):
|
| 30 |
message_placeholder = st.empty()
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 36 |
{"role": m["role"], "content": m["content"]}
|
| 37 |
for m in st.session_state.messages
|
| 38 |
],
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
|
| 42 |
-
|
| 43 |
-
|
| 44 |
-
|
| 45 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
import streamlit as st
import os
from openai import AzureOpenAI

from functions import call_function

st.title("Support Chat UI")
# when will my order be delivered?, [email protected] W123123

# Function schema advertised to the model for OpenAI function calling.
functions = [
    {
        "name": "order_tracking_status",
        "description": "Retrieves the status of an order based on **both** the email address and order number.",
        "parameters": {
            "type": "object",
            "properties": {
                "email_address": {
                    "type": "string",
                    "description": "The email address associated with the order"
                },
                "order_number": {
                    "type": "string",
                    "description": "The order number. For online orders, this starts with either the letter W or C. "
                                   "For in-store orders, this starts with the letter H. You can find your order "
                                   "number in your confirmation email or your receipt."
                },
            },
            # BUG FIX: was ["email_address, order_number"] — a single string containing a
            # comma — so neither field was actually marked required by the API.
            "required": ["email_address", "order_number"],
        },
    },
]

client = AzureOpenAI(
    api_key=os.environ['OPENAI_API_KEY'],
    api_version="2023-07-01-preview",
    azure_endpoint=os.environ['AZURE_ENDPOINT'],
)

if "openai_model" not in st.session_state:
    st.session_state["openai_model"] = "gpt-35-turbo"

if "messages" not in st.session_state:
    # Seed the conversation with the system prompt; it is hidden from the UI below.
    st.session_state.messages = [{"role": "system", "content": "You are a helpful customer support agent for "
                                                               "The Home Depot. Be as helpful as possible and call "
                                                               "functions when necessary."},]

# Replay the visible history (skip system and function-role messages).
for message in st.session_state.messages:
    if message["role"] == "assistant" or message["role"] == "user":
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

if prompt := st.chat_input("How can we help you today?"):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    with st.chat_message("assistant", avatar="🏠"):  # avatar=st.image('Home-Depot-Logo.png', width=50)):
        message_placeholder = st.empty()
        full_message = ""

        # Loop: stream a completion; if the model requests a function call,
        # execute it, append the function result, and ask the model again.
        called_function = True
        while called_function:
            called_function = False
            # BUG FIX: reset the accumulator on every pass. Previously func_call was
            # initialized once outside the loop, so a second function call in the same
            # turn concatenated its arguments onto the first call's JSON.
            func_call = {
                "name": None,
                "arguments": "",
            }
            for response in client.chat.completions.create(
                model=st.session_state["openai_model"],
                messages=[
                    # Function-role messages must carry their "name" field.
                    {"role": m["role"], "content": m["content"], "name": m["name"]} if "name" in m else
                    {"role": m["role"], "content": m["content"]}
                    for m in st.session_state.messages
                ],
                functions=functions,
                function_call="auto",
                stream=True,
            ):
                if len(response.choices) > 0:
                    delta = response.choices[0].delta

                    full_message += (delta.content or "")
                    # Function-call fragments stream incrementally: the name arrives
                    # once, the JSON arguments arrive in chunks.
                    if delta.function_call is not None:
                        if delta.function_call.name is not None:
                            func_call["name"] = delta.function_call.name
                        if delta.function_call.arguments is not None:
                            func_call["arguments"] += delta.function_call.arguments
                    if response.choices[0].finish_reason == "function_call" and func_call["name"] is not None:
                        print(f"Function generation requested, calling function")
                        function_response = call_function(st.session_state.messages, func_call)
                        print("function response")
                        print(function_response)
                        st.session_state.messages.append(function_response)
                        called_function = True

                    message_placeholder.markdown(full_message + "▌")

        message_placeholder.markdown(full_message)

        st.session_state.messages.append({"role": "assistant", "content": full_message})
|
functions.py
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json


def call_function(messages, function_call):
    """Execute a function call requested by the model and return a chat message.

    Extend by adding clauses to the dispatch below.

    Args:
        messages: the running chat history (currently unused by the only
            implemented function, but kept for future functions that need it).
        function_call: dict with "name" (function to run) and "arguments"
            (JSON-encoded string of keyword arguments).

    Returns:
        A function-role message dict: {"role": "function", "name": ..., "content": ...}.
        On any failure (bad JSON, missing keys, lookup error) returns a
        "call failed" message instead of raising, so the chat loop keeps going.

    Raises:
        Exception: if the requested function name is unknown.
    """
    print(function_call)

    if function_call["name"] == "order_tracking_status":
        try:
            parsed_output = json.loads(
                function_call["arguments"]
            )
            print("Looking up order status")
            results = get_order_tracking_status(parsed_output["email_address"], parsed_output["order_number"])
            return {
                "role": "function",
                "name": function_call["name"],
                "content": str(results),
            }
        except Exception as e:
            # Deliberate best-effort: surface the failure to the model rather
            # than crash the streaming UI.
            print("Function execution failed")
            print(f"Error message: {e}")
            return {"role": "function", "content": "call failed", "name": "order_tracking_status"}

    else:
        raise Exception("Function does not exist and cannot be called")


def get_order_tracking_status(email_address, order_number):
    """Stub order lookup — returns a canned status regardless of input.

    Replace with a real order-tracking backend query.
    """
    return "The order was delivered on Monday, November 12th"
|
requirements.txt
CHANGED
|
@@ -1 +1,2 @@
|
|
| 1 |
-
openai==1.3.3
|
|
|
|
|
|
| 1 |
+
openai==1.3.3
|
| 2 |
+
streamlit~=1.28.2
|