Update app.py
Browse files
app.py
CHANGED
@@ -101,8 +101,10 @@ def process_query(user_query):
|
|
101 |
rag_response = rag_chain.invoke({"input": preprocessed_query})["answer"]
|
102 |
gemini_resp = gemini_response(preprocessed_query, full_pdf_content)
|
103 |
final_response = generate_final_response(user_query, rag_response, gemini_resp)
|
|
|
|
|
104 |
|
105 |
-
return rag_response, gemini_resp,
|
106 |
except Exception as e:
|
107 |
error_message = f"An error occurred: {str(e)}"
|
108 |
return error_message, error_message, error_message
|
@@ -127,7 +129,7 @@ iface = gr.Interface(
|
|
127 |
outputs=[
|
128 |
gr.Textbox(label="RAG Pipeline (Llama3.1) Response"),
|
129 |
gr.Textbox(label="Long Context (Gemini 1.5 Pro) Response"),
|
130 |
-
gr.
|
131 |
],
|
132 |
title="Data Protection Team",
|
133 |
description="Get responses combining advanced RAG, Long Context, and SOTA models to data protection related questions (GDPR, FERPA, COPPA).",
|
|
|
101 |
rag_response = rag_chain.invoke({"input": preprocessed_query})["answer"]
|
102 |
gemini_resp = gemini_response(preprocessed_query, full_pdf_content)
|
103 |
final_response = generate_final_response(user_query, rag_response, gemini_resp)
|
104 |
+
final_output = "# **Final (GPT-4o) Response:** " + final_response
|
105 |
+
html_content = markdown2.markdown(final_output)
|
106 |
|
107 |
+
return rag_response, gemini_resp, html_content
|
108 |
except Exception as e:
|
109 |
error_message = f"An error occurred: {str(e)}"
|
110 |
return error_message, error_message, error_message
|
|
|
129 |
outputs=[
|
130 |
gr.Textbox(label="RAG Pipeline (Llama3.1) Response"),
|
131 |
gr.Textbox(label="Long Context (Gemini 1.5 Pro) Response"),
|
132 |
+
gr.HTML(label="Final (GPT-4o) Response")
|
133 |
],
|
134 |
title="Data Protection Team",
|
135 |
description="Get responses combining advanced RAG, Long Context, and SOTA models to data protection related questions (GDPR, FERPA, COPPA).",
|