# Gradio chat UI over a pickled vectorstore QA chain.
import os
from typing import Optional, Tuple
from threading import Lock
import pickle
import gradio as gr
from query_data import get_chain
class ChatWrapper:
    """Thread-safe callable that answers one question per invocation.

    Designed to be passed directly to ``gr.Interface``: it receives the user's
    input string plus the accumulated chat history (Gradio state) and returns
    ``(answer, updated_history)`` matching the ``["text", state]`` outputs.
    """

    def __init__(self):
        # Serializes calls so concurrent Gradio requests don't race on the
        # environment variable / chain construction.
        self.lock = Lock()

    def __call__(self, inp: str, history: Optional[Tuple[str, str]]):
        """Answer *inp* against the pickled vectorstore.

        Parameters
        ----------
        inp : str
            The user's question.
        history : optional list of (question, answer) pairs
            Conversation so far (Gradio state); ``None`` on the first turn.

        Returns
        -------
        tuple
            ``(answer_text, updated_history)``.

        Raises
        ------
        RuntimeError
            If ``OPENAI_API_KEY`` is not set in the environment.
        """
        with self.lock:
            # SECURITY: the original version hardcoded an OpenAI API key in
            # source. Require it from the environment instead — never commit
            # secrets; rotate any key that was previously committed.
            if not os.environ.get("OPENAI_API_KEY"):
                raise RuntimeError(
                    "OPENAI_API_KEY environment variable is not set"
                )

            # NOTE(review): the vectorstore is re-loaded on every call; this
            # preserves the original behavior but caching it would be cheaper.
            # pickle.load is only safe on trusted, locally-produced files.
            with open("vectorstore.pkl", "rb") as f:
                vectorstore = pickle.load(f)
            qa_chain = get_chain(vectorstore)

            # Use the caller-supplied history so the conversation accumulates
            # across turns (the old code rebuilt an empty list each call, and
            # its `while True` loop never returned to Gradio at all).
            history = history or []
            output = qa_chain({"question": inp, "chat_history": history})
            answer = output["answer"]  # was `result["answer"]` — a NameError
            history.append((inp, answer))
            return answer, history
# Single shared handler; its internal Lock serializes concurrent requests.
chat = ChatWrapper()

if __name__ == "__main__":
    # Guard the launch so importing this module doesn't start a web server.
    # `state` threads the (question, answer) history between calls: it is
    # passed in as the second input and returned as the second output.
    state = gr.State()
    gradio_interface = gr.Interface(
        chat,
        inputs=["text", state],
        outputs=["text", state],
    )
    gradio_interface.launch(debug=True)