Spaces:
Sleeping
Sleeping
Commit
·
9e8cfa1
1
Parent(s):
63a6a65
first commit
Browse files- app.py +64 -0
- requirements.txt +5 -0
app.py
ADDED
|
@@ -0,0 +1,64 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import streamlit as st
|
| 2 |
+
import pinecone
|
| 3 |
+
from makechain import get_chain
|
| 4 |
+
from langchain.vectorstores.pinecone import Pinecone
|
| 5 |
+
from env import PINECONE_INDEX_NAME, PINECONE_ENVIRONMENT, PINECONE_API_KEY, OPENAI_API_KEY
|
| 6 |
+
from langchain.embeddings.openai import OpenAIEmbeddings
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
# --- Page chrome: title and sidebar help text --------------------------------
st.title("Ask the Black@Stanford Exhibit")
st.sidebar.header("You can ask questions of interviews with Black Stanford students and faculty from the University "
                  "Archives")
st.sidebar.info(
    '''This is a web application that allows you to interact with
    the Stanford Archives.
    Enter a **Question** in the **text box** and **press enter** to receive
    a **response** from our ChatBot.
    '''
)

# create Vectorstore
# NOTE(review): credentials are read from Streamlit secrets here, while the
# `env` module constants imported at the top are never used — confirm which
# configuration source is intended and drop the other.
pinecone.init(
    api_key=st.secrets["PINECONE_API_KEY"],  # find at app.pinecone.io
    environment=st.secrets["PINECONE_ENVIRONMENT"]  # next to api key in console
)
index = pinecone.Index(index_name=st.secrets["PINECONE_INDEX_NAME"])
embed = OpenAIEmbeddings(openai_api_key=st.secrets["OPENAI_API_KEY"])
# Metadata field in the Pinecone index that holds the raw document text
# (presumably set at ingestion time — verify against the indexing script).
text_field = "text"
# LangChain vector store wrapping the Pinecone index; queries are embedded
# with OpenAI embeddings via `embed.embed_query`.
vectorStore = Pinecone(
    index, embed.embed_query, text_field
)

# create chain
# Conversational QA chain built over the vector store by the local
# `makechain.get_chain` helper.
qa_chain = get_chain(vectorStore)
|
| 34 |
+
|
| 35 |
+
def main():
    """Render the question input, run the QA chain, and display the answer.

    Reads one question from a Streamlit text box, normalizes it to a single
    line, sends it through the module-level ``qa_chain``, and writes the
    chain's ``answer`` and ``sources`` fields to the page.
    """
    global query  # kept for backward compatibility in case other modules read it

    user_query = st.text_input("Enter your question here")
    # BUGFIX: the original condition `user_query != ":q" or user_query != ""`
    # is always True (no string equals both ":q" and ""), so the chain ran
    # even on empty input — one wasted OpenAI/Pinecone call on every page
    # load. Require a non-empty query that is not the ":q" quit sentinel.
    if user_query and user_query != ":q":
        # Collapse newlines so the chain receives a single-line question.
        query = user_query.strip().replace('\n', ' ')
        response = qa_chain(
            {
                'question': query,
            }
        )
        st.write(f"{response['answer']}")
        st.write("Sources: ")
        st.write(f"{response['sources']}")
|
| 50 |
+
|
| 51 |
+
# Script entry point: run the app once per Streamlit rerun, surfacing any
# uncaught error on the page instead of crashing the script silently.
try:
    main()
except Exception as e:
    st.write("An error occurred while running the application: ", e)
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
|
requirements.txt
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
streamlit~=1.22.0
|
| 2 |
+
pinecone-client
|
| 3 |
+
langchain~=0.0.177
|
| 4 |
+
openai~=0.27.7
|
| 5 |
+
tqdm~=4.65.0
|