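"""Gradio app for chatting over a pre-built vectorstore.

Loads a pickled vectorstore ("vectorstore.pkl") and answers questions with a
question-answering chain built by query_data.get_chain.
"""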
import os
from typing import List, Optional, Tuple
from threading import Lock
import pickle
import gradio as gr
from query_data import get_chain
class ChatWrapper:
    def __init__(self):
        self.lock = Lock()

    def set_openai_api_key(self, api_key: str):
        """Set the API key and return a QA chain over the pickled vectorstore.

        If no api_key is given, None is returned.
        """
        if not api_key:
            return None
        os.environ["OPENAI_API_KEY"] = api_key
        with open("vectorstore.pkl", "rb") as f:
            vectorstore = pickle.load(f)
        chain = get_chain(vectorstore)
        return chain
    def __call__(self, inp: str, history: Optional[List[Tuple[str, str]]]):
        """Answer a single question and return (answer, updated chat history)."""
        self.lock.acquire()
        try:
            history = history or []
            # The original file hard-coded an OpenAI API key here; it is assumed
            # instead that the key is supplied via the OPENAI_API_KEY variable.
            api_key = os.environ.get("OPENAI_API_KEY", "")
            qa_chain = self.set_openai_api_key(api_key)
            if qa_chain is None:
                return "Please set the OPENAI_API_KEY environment variable.", history
            result = qa_chain({"question": inp, "chat_history": history})
            output = result["answer"]
            history.append((inp, output))
            chat_result = (output, history)
        except Exception as e:
            raise e
        finally:
            self.lock.release()
        return chat_result
chat = ChatWrapper()

# Text in, text out, with per-session chat history carried in State components
# (assumes Gradio 3.x or later, where gr.outputs.State has become gr.State).
gradio_interface = gr.Interface(chat, inputs=["text", gr.State()], outputs=["text", gr.State()])
gradio_interface.launch(debug=True)