Update app.py
app.py CHANGED
@@ -101,7 +101,6 @@ def process_query(user_query):
         rag_response = rag_chain.invoke({"input": preprocessed_query})["answer"]
         gemini_resp = gemini_response(preprocessed_query, full_pdf_content)
         final_response = generate_final_response(user_query, rag_response, gemini_resp)
-        html_content = markdown2.markdown(final_response)

         return rag_response, gemini_resp, html_content
     except Exception as e:
@@ -128,7 +127,7 @@ iface = gr.Interface(
     outputs=[
         gr.Textbox(label="RAG Pipeline (Llama3.1) Response"),
         gr.Textbox(label="Long Context (Gemini 1.5 Pro) Response"),
-        gr.
+        gr.Textbox(label="Final (GPT-4o) Response")
     ],
     title="Data Protection Team",
     description="Get responses combining advanced RAG, Long Context, and SOTA models to data protection related questions (GDPR, FERPA, COPPA).",
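For context, a minimal, runnable sketch of the wiring this commit touches: a `process_query` that returns three strings, fed into a `gr.Interface` with three output textboxes. The model calls (`rag_chain`, `gemini_response`, `generate_final_response`) are replaced here with hypothetical stand-ins, and returning `final_response` as the third value is an assumption; note that the committed code still returns a variable named `html_content` even though the line defining it was removed in this change.

```python
# Sketch only: hypothetical stand-ins for the app's model calls.
# Only the three-output shape and the outputs list mirror the diff.
import gradio as gr

def rag_answer(query: str) -> str:
    # stands in for rag_chain.invoke({"input": ...})["answer"]
    return f"[RAG] {query}"

def long_context_answer(query: str) -> str:
    # stands in for gemini_response(query, full_pdf_content)
    return f"[Gemini] {query}"

def final_answer(query: str, rag: str, lc: str) -> str:
    # stands in for generate_final_response(...)
    return f"[GPT-4o] combining: {rag} | {lc}"

def process_query(user_query: str):
    rag_response = rag_answer(user_query)
    gemini_resp = long_context_answer(user_query)
    final_response = final_answer(user_query, rag_response, gemini_resp)
    # The commit drops the markdown2 rendering step, so the third value
    # is returned as plain text here (an assumption; the committed code
    # still returns the now-undefined html_content).
    return rag_response, gemini_resp, final_response

iface = gr.Interface(
    fn=process_query,
    inputs=gr.Textbox(label="Question"),
    outputs=[
        gr.Textbox(label="RAG Pipeline (Llama3.1) Response"),
        gr.Textbox(label="Long Context (Gemini 1.5 Pro) Response"),
        gr.Textbox(label="Final (GPT-4o) Response"),
    ],
    title="Data Protection Team",
)

if __name__ == "__main__":
    iface.launch()
```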