Commit 3ee66e5 · Parent(s): 603178d

Update app.py
app.py CHANGED
@@ -1,23 +1,19 @@
 import os
 from typing import Optional, Tuple
+from threading import Lock
+import pickle
 
 import gradio as gr
-import pickle
 from query_data import get_chain
-from threading import Lock
 
 with open("vectorstore.pkl", "rb") as f:
     vectorstore = pickle.load(f)
 
-
 class ChatWrapper:
-
     def __init__(self):
         self.lock = Lock()
-
-
-    ):
-    def set_openai_api_key(api_key: str):
+
+    def set_openai_api_key(self, api_key: str):
         """Set the api key and return chain.
         If no api_key, then None is returned.
         """
@@ -27,43 +23,31 @@ class ChatWrapper:
         os.environ["OPENAI_API_KEY"] = ""
         return chain
 
-
+    def __call__(self, inp: str, history: Optional[Tuple[str, str]]):
         self.lock.acquire()
         api_key = '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'
-        chain = set_openai_api_key(api_key)
+        chain = self.set_openai_api_key(api_key)
         try:
             history = history or []
             # If chain is None, that is because no API key was provided.
             if chain is None:
                 history.append((inp, "Please paste your OpenAI key to use"))
-                return
+                return inp, history
             # Set OpenAI key
             import openai
             openai.api_key = api_key
             # Run chain and append input.
             output = chain({"question": inp, "chat_history": history})["answer"]
             history.append((inp, output))
-            chatResult = (
+            chatResult = (output, history)
         except Exception as e:
             raise e
         finally:
             self.lock.release()
-
-        return chatResult, history
+        return chatResult
 
-
 chat = ChatWrapper()
-state = gr.State()
-
-def echo(name, request: gr.Request):
-    if request:
-        print("Request headers dictionary:", request.headers)
-        print("IP address:", request.client.host)
-        print("Body", request.body)
-    return name
-
-def my_inference_function(name):
-    return "Hello " + name + "!"
+state = gr.outputs.State()
 
-gradio_interface = gr.Interface(chat, inputs=[
+gradio_interface = gr.Interface(chat, inputs=["text", state], outputs=["text", state])
 gradio_interface.launch(debug=True)
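For context, the updated ChatWrapper.__call__ takes (inp, history) and returns an (answer, history) pair, which is the contract the ["text", state] inputs/outputs wiring of gr.Interface expects. The sketch below is a minimal stand-in for that contract so it can run without vectorstore.pkl, query_data, or an OpenAI key; FakeChain and MiniChatWrapper are illustrative names, not part of this Space.

# Minimal sketch of the calling convention introduced in this commit.
# FakeChain stands in for the object returned by query_data.get_chain;
# like the real chain, it maps {"question", "chat_history"} to {"answer"}.
from threading import Lock
from typing import List, Optional, Tuple


class FakeChain:
    def __call__(self, inputs: dict) -> dict:
        return {"answer": "echo: " + inputs["question"]}


class MiniChatWrapper:
    """Same (inp, history) -> (answer, history) contract as app.py's ChatWrapper."""

    def __init__(self, chain):
        self.chain = chain
        self.lock = Lock()

    def __call__(self, inp: str, history: Optional[List[Tuple[str, str]]]):
        with self.lock:  # equivalent to the acquire()/release() pair in the diff
            history = history or []
            output = self.chain({"question": inp, "chat_history": history})["answer"]
            history.append((inp, output))
            return output, history  # the two values fed back into ["text", state]


if __name__ == "__main__":
    chat = MiniChatWrapper(FakeChain())
    answer, history = chat("What does this Space do?", None)
    print(answer)   # echo: What does this Space do?
    print(history)  # [("What does this Space do?", "echo: What does this Space do?")]

The commit also swaps gr.State() for gr.outputs.State(), which matches the older Gradio namespace this Space appears to pin; in recent Gradio releases the equivalent session-state component is gr.State().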