# app.py — Gradio chat interface over a pickled vectorstore.
# Source: Hugging Face Space by ajwthompson, "Update app.py" (commit e4e782c).
import os
from typing import Optional, Tuple
from threading import Lock
import pickle
import gradio as gr
from query_data import get_chain
class ChatWrapper:
    """Thread-safe Gradio handler that answers one question per call.

    Gradio may invoke the handler from multiple threads; a lock serializes
    access so concurrent requests do not interleave chain calls.
    """

    def __init__(self):
        # One lock per wrapper; held for the duration of each call.
        self.lock = Lock()

    def __call__(self, inp: str, history: Optional[Tuple[str, str]]):
        """Answer ``inp`` against the pickled vectorstore.

        Parameters
        ----------
        inp : the user's question (Gradio "text" input).
        history : prior (question, answer) pairs from Gradio state, or None
            on the first call.

        Returns
        -------
        (output, history) where ``output`` is the chain's result dict and
        ``history`` is the updated list of (question, answer) pairs.

        Raises
        ------
        RuntimeError : if OPENAI_API_KEY is not set in the environment.
        """
        with self.lock:  # released even if the chain raises
            # SECURITY: never hard-code the API key in source (the original
            # embedded a live secret). Read it from the environment instead.
            if not os.environ.get("OPENAI_API_KEY"):
                raise RuntimeError("Set the OPENAI_API_KEY environment variable.")
            # Reuse the session history instead of discarding it each call,
            # so the chain actually has conversational memory.
            history = history or []
            # SECURITY NOTE: pickle.load executes arbitrary code on malicious
            # input — only load a vectorstore.pkl you produced yourself.
            with open("vectorstore.pkl", "rb") as f:
                vectorstore = pickle.load(f)
            qa_chain = get_chain(vectorstore)
            # One request/response per call; Gradio drives the loop, so no
            # `while True` here (the original never returned).
            output = qa_chain({"question": inp, "chat_history": history})
            history.append((inp, output["answer"]))
            return output, history
# Single shared handler; its internal lock serializes concurrent requests.
chat = ChatWrapper()
# Session state that Gradio threads through successive calls (the handler's
# second input and second output).
state = gr.State()
# Wire the handler: receives (question text, state), returns (answer, state).
gradio_interface = gr.Interface(chat, inputs=["text", state], outputs=["text", state])
gradio_interface.launch(debug=True)