|
import os |
|
from typing import Optional, Tuple |
|
from threading import Lock |
|
import pickle |
|
|
|
import gradio as gr |
|
from query_data import get_chain |
|
|
|
|
|
|
|
class ChatWrapper:
    """Callable that answers one question against the pickled vectorstore.

    A single instance is shared across Gradio requests; an internal lock
    serializes calls because the chain / vectorstore are not assumed to be
    thread-safe.
    """

    def __init__(self):
        # Serialize concurrent Gradio requests through this one instance.
        self.lock = Lock()

    def __call__(self, inp: str, history: Optional[list]):
        """Answer *inp* and return ``(answer_text, updated_history)``.

        Parameters
        ----------
        inp : str
            The user's question.
        history : list of (question, answer) tuples, or None on first call
            Prior turns, carried between calls by ``gr.State``.

        Raises
        ------
        RuntimeError
            If the ``OPENAI_API_KEY`` environment variable is not set.
        """
        history = history or []
        with self.lock:
            # SECURITY: the key must come from the environment — never
            # hard-code credentials in source.
            if not os.environ.get("OPENAI_API_KEY"):
                raise RuntimeError(
                    "OPENAI_API_KEY environment variable is not set"
                )

            # NOTE(review): pickle.load is unsafe on untrusted files;
            # acceptable only because vectorstore.pkl is produced locally.
            with open("vectorstore.pkl", "rb") as f:
                vectorstore = pickle.load(f)

            qa_chain = get_chain(vectorstore)

            # One question per call — Gradio drives the request loop, so
            # there must be no blocking while-loop here.
            output = qa_chain({"question": inp, "chat_history": history})
            history.append((inp, output["answer"]))

            # First element feeds the "text" output, second the state.
            return output["answer"], history
|
|
|
# One shared handler instance; its internal lock serializes requests.
chat = ChatWrapper()

# Session state object that threads the chat history between calls.
state = gr.State()

# Wire the handler into a simple text-in / text-out interface and run it.
gradio_interface = gr.Interface(
    chat,
    inputs=["text", state],
    outputs=["text", state],
)
gradio_interface.launch(debug=True)
|
|