"""Gradio chat UI that answers questions against a pickled vectorstore.

The chain (built by ``query_data.get_chain``) is created once the user
supplies an OpenAI API key; ``ChatWrapper`` serializes calls with a lock
because the underlying chain is not known to be thread-safe.
"""

import os
import pickle
from threading import Lock
from typing import Any, List, Optional, Tuple

import gradio as gr

from query_data import get_chain

# SECURITY NOTE(review): pickle.load can execute arbitrary code embedded in
# the file — only ship a vectorstore.pkl from a trusted build step.
with open("vectorstore.pkl", "rb") as f:
    vectorstore = pickle.load(f)


def set_openai_api_key(api_key: str):
    """Set the API key and return a chain.

    If no api_key is given, None is returned (implicitly).
    The env var is cleared again immediately so the key does not linger
    in the process environment after the chain has captured it.
    """
    if api_key:
        os.environ["OPENAI_API_KEY"] = api_key
        chain = get_chain(vectorstore)
        os.environ["OPENAI_API_KEY"] = ""
        return chain


# Never hard-code secrets in source (the original committed a literal
# "sk-..." key here). Read the key from the environment instead; when it
# is unset the chain stays None and the UI asks the user for a key.
_initial_chain = set_openai_api_key(os.environ.get("OPENAI_API_KEY", ""))


class ChatWrapper:
    """Callable that runs one chat turn; a lock serializes concurrent calls."""

    def __init__(self):
        self.lock = Lock()

    def __call__(
        self,
        api_key: str,
        inp: str,
        history: Optional[List[Tuple[str, str]]],
        chain,
    ):
        """Execute the chat functionality.

        Returns the updated history twice: once for the chatbot display
        and once for the state component.
        """
        with self.lock:  # context manager guarantees release on any exit path
            history = history or []
            # If chain is None, that is because no API key was provided.
            if chain is None:
                history.append((inp, "Please paste your OpenAI key to use"))
                return history, history
            # Set OpenAI key. Local import keeps openai an optional
            # dependency until a chain actually exists.
            import openai

            openai.api_key = api_key
            # Run chain and append input.
            output = chain({"question": inp, "chat_history": history})["answer"]
            history.append((inp, output))
        return history, history


chat = ChatWrapper()


def echo(name, request: gr.Request):
    """Debug helper: log request metadata and echo the input back."""
    if request:
        print("Request headers dictionary:", request.headers)
        print("IP address:", request.client.host)
        # NOTE(review): gr.Request may not expose .body on all Gradio
        # versions — confirm against the installed release.
        print("Body", request.body)
    return name


def my_inference_function(name):
    """Trivial demo endpoint: greet the given name."""
    return "Hello " + name + "!"


# The original passed undefined names (openai_api_key_textbox, message,
# chatbot) and repeated the `outputs` keyword, which is a SyntaxError.
# Define the components explicitly and give `outputs` exactly once,
# matching ChatWrapper's (history, history) return shape.
openai_api_key_textbox = gr.Textbox(
    placeholder="Paste your OpenAI API key (sk-...)", type="password"
)
message = gr.Textbox(label="Your question")
state = gr.State()
agent_state = gr.State(value=_initial_chain)
chatbot = gr.Chatbot()

gradio_interface = gr.Interface(
    fn=chat,
    inputs=[openai_api_key_textbox, message, state, agent_state],
    outputs=[chatbot, state],
)

if __name__ == "__main__":
    # Guarded so importing this module does not start a server.
    gradio_interface.launch()