|
import os |
|
from typing import Optional, Tuple |
|
|
|
import gradio as gr |
|
import pickle |
|
from query_data import get_chain |
|
from threading import Lock |
|
|
|
# Load the prebuilt vectorstore from disk once, at import time.
# NOTE(review): pickle.load can execute arbitrary code — acceptable only
# because vectorstore.pkl is assumed to be a locally generated, trusted
# artifact; never load a pickle received from an untrusted source.
with open("vectorstore.pkl", "rb") as f:

    vectorstore = pickle.load(f)
|
|
|
|
|
def set_openai_api_key(api_key: str):
    """Set the OpenAI API key and build a chain over the vectorstore.

    Args:
        api_key: The OpenAI API key supplied by the user.

    Returns:
        The chain produced by ``get_chain(vectorstore)``, or ``None`` when
        ``api_key`` is falsy (empty string / None).
    """
    # BUG FIX: the original fell through to `return chain` with `chain`
    # unbound when api_key was falsy, raising UnboundLocalError instead of
    # returning None as documented.
    if not api_key:
        return None
    os.environ["OPENAI_API_KEY"] = api_key
    try:
        return get_chain(vectorstore)
    finally:
        # Scrub the key from the environment even if get_chain raises,
        # so the secret does not linger in the process environment.
        os.environ["OPENAI_API_KEY"] = ""
|
|
|
|
|
class ChatWrapper:
    """Thread-safe callable that runs one chat turn through the QA chain.

    A single instance is shared by Gradio across requests, so a lock
    serializes calls: building the chain mutates a process-wide
    environment variable and the chain itself is not known to be
    thread-safe.
    """

    def __init__(self):
        # Serializes concurrent chat requests (see class docstring).
        self.lock = Lock()

    def __call__(
        self, api_key: str, inp: str, history: Optional[Tuple[str, str]], chain
    ):
        """Execute the chat functionality.

        Args:
            api_key: User-supplied OpenAI API key.
            inp: The user's question for this turn.
            history: Prior (question, answer) pairs, or None on first turn.
            chain: Ignored; the chain is rebuilt from api_key each call.

        Returns:
            A (history, history) pair, as Gradio expects one value per
            declared output component.
        """
        history = history or []
        # SECURITY FIX: the original hard-coded a real OpenAI API key here,
        # which both leaked a credential in source control and made the
        # "please paste your key" branch unreachable. Use the caller's key.
        with self.lock:
            chain = set_openai_api_key(api_key)
            if chain is None:
                history.append((inp, "Please paste your OpenAI key to use"))
                return history, history

            import openai
            openai.api_key = api_key

            output = chain({"question": inp, "chat_history": history})["answer"]
            history.append((inp, output))
        return history, history
|
|
|
# Shared singleton used as the Gradio callback for every request.
chat = ChatWrapper()

# NOTE(review): gr.State() is created at module level, outside any Blocks
# context — presumably intended as the per-session state slot for the
# Interface below; confirm this is supported by the gradio version in use.
state = gr.State()
|
|
|
def echo(name, request: gr.Request):
    """Log request metadata for debugging and echo *name* back unchanged.

    Args:
        name: Arbitrary value returned as-is.
        request: Incoming Gradio request; when falsy, nothing is logged.

    Returns:
        The *name* argument, unmodified.
    """
    if not request:
        return name
    print("Request headers dictionary:", request.headers)
    print("IP address:", request.client.host)
    # NOTE(review): presumably request.body holds the raw request payload —
    # verify this attribute exists on gr.Request in the installed version.
    print("Body", request.body)
    return name
|
|
|
def my_inference_function(name):
    """Return a friendly greeting for *name*."""
    return f"Hello {name}!"
|
|
|
# NOTE(review): ChatWrapper.__call__ takes four inputs (api_key, inp,
# history, chain) but only two input components are wired here; likewise
# it returns (history, history) while outputs are ['text', state].
# Presumably this arity mismatch makes the UI misbehave — confirm the
# intended component wiring before shipping.
gradio_interface = gr.Interface(chat, inputs=['text',state], outputs=['text', state])

gradio_interface.launch()
|
|
|
|
|
|
|
|
|
|