Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -77,7 +77,7 @@ def retrieve_document(query):
|
|
77 |
embeddings_file = requests.get(API_URL_EMBEDDINGS)
|
78 |
metadata_file = requests.get(API_URL_METADATA)
|
79 |
|
80 |
-
print(embeddings_file, metadata_file)
|
81 |
# Generate query embedding
|
82 |
query_embedding = embedding_model.encode([query]).astype(np.float32)
|
83 |
|
@@ -127,10 +127,9 @@ def split_text(text, chunk_size=500):
|
|
127 |
print("splitting")
|
128 |
return [text[i:i+chunk_size] for i in range(0, len(text), chunk_size)]
|
129 |
|
130 |
-
def chatbot(
|
131 |
"""Processes the PDF and answers the user's question."""
|
132 |
print("chatbot start")
|
133 |
-
|
134 |
|
135 |
# retrieve the document relevant to the query
|
136 |
doc = retrieve_document(user_question)
|
@@ -170,7 +169,7 @@ iface = gr.TabbedInterface(
|
|
170 |
[
|
171 |
gr.Interface(
|
172 |
fn=chatbot,
|
173 |
-
inputs=[gr.File(
|
174 |
outputs=gr.Textbox(label="Answer"),
|
175 |
title="PDF Q&A Chatbot (Powered by Together.AI)",
|
176 |
),
|
|
|
77 |
embeddings_file = requests.get(API_URL_EMBEDDINGS)
|
78 |
metadata_file = requests.get(API_URL_METADATA)
|
79 |
|
80 |
+
print(embeddings_file.json(), metadata_file.json())
|
81 |
# Generate query embedding
|
82 |
query_embedding = embedding_model.encode([query]).astype(np.float32)
|
83 |
|
|
|
127 |
print("splitting")
|
128 |
return [text[i:i+chunk_size] for i in range(0, len(text), chunk_size)]
|
129 |
|
130 |
+
def chatbot(user_question):
|
131 |
"""Processes the PDF and answers the user's question."""
|
132 |
print("chatbot start")
|
|
|
133 |
|
134 |
# retrieve the document relevant to the query
|
135 |
doc = retrieve_document(user_question)
|
|
|
169 |
[
|
170 |
gr.Interface(
|
171 |
fn=chatbot,
|
172 |
+
inputs=[gr.Textbox(label="Ask a Question")],
|
173 |
outputs=gr.Textbox(label="Answer"),
|
174 |
title="PDF Q&A Chatbot (Powered by Together.AI)",
|
175 |
),
|