import os
import re

import streamlit as st
from dotenv import load_dotenv
import openai
from langsmith import traceable

# Load environment variables
load_dotenv()
api_key = os.getenv("OPENAI_API_KEY")
openai.api_key = api_key
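
# Note: this script targets the pre-1.0 `openai` Python SDK (module-level API key
# and `openai.ChatCompletion.create`); the 1.x SDK replaces these with a client
# object, so pin `openai<1` to run the code as written.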
# Helper function to remove citations
def remove_citation(text: str) -> str:
    # Strip Assistants-style citation markers such as 【12†source】
    pattern = r"【\d+†\w+】"
    return re.sub(pattern, "", text)
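
# Example (assuming the 【N†label】 marker format above):
#   remove_citation("See the pricing sheet 【12†source】 for details.")
#   returns "See the pricing sheet  for details." (the marker itself is stripped)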

# Initialize session state for messages and thread_id
if "messages" not in st.session_state:
    st.session_state["messages"] = []
if "thread_id" not in st.session_state:
    st.session_state["thread_id"] = None

st.title("Solution Specifier A")

# Traceable function for predict logic
@traceable
def get_response(user_input: str, thread_id: str = None):
    """
    Call the OpenAI API to get a response.
    If thread_id is provided, it is passed along as the `user` identifier so the
    calls can be grouped under one conversation; otherwise a fresh call is made.
    Returns the reply text and the completion id.
    """
    messages = [{"role": "user", "content": user_input}]
    if thread_id:
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=messages,
            user=thread_id,
        )
    else:
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=messages,
        )
    return response["choices"][0]["message"]["content"], response["id"]
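
# Note: the Chat Completions endpoint is stateless and `user` is only an
# end-user identifier, so each call above sees just the latest message; to give
# the model real memory of the chat you would need to resend the prior turns
# (e.g. from st.session_state["messages"]) on every request.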

# Streamlit app logic
def predict(user_input: str) -> str:
    if st.session_state["thread_id"] is None:
        response_text, thread_id = get_response(user_input)
        st.session_state["thread_id"] = thread_id
    else:
        response_text, _ = get_response(user_input, thread_id=st.session_state["thread_id"])
    return remove_citation(response_text)

# Display any existing messages (from a previous run or refresh)
for msg in st.session_state["messages"]:
    if msg["role"] == "user":
        with st.chat_message("user"):
            st.write(msg["content"])
    else:
        with st.chat_message("assistant"):
            st.write(msg["content"])

# Create the chat input widget at the bottom of the page
user_input = st.chat_input("Type your message here...")

# When the user hits ENTER on st.chat_input
if user_input:
    # Add the user message to session state
    st.session_state["messages"].append({"role": "user", "content": user_input})
    # Display the user's message
    with st.chat_message("user"):
        st.write(user_input)

    # Get the assistant's response
    response_text = predict(user_input)

    # Add the assistant response to session state
    st.session_state["messages"].append({"role": "assistant", "content": response_text})
    # Display the assistant's reply
    with st.chat_message("assistant"):
        st.write(response_text)
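
# To run this locally (assuming the file is saved as app.py and a .env file
# next to it provides OPENAI_API_KEY):
#   pip install streamlit "openai<1" python-dotenv langsmith
#   streamlit run app.py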