jeremierostan committed on
Commit 723d05e · verified · 1 Parent(s): c671024

Update app.py

Files changed (1)
  1. app.py +11 -4
app.py CHANGED
@@ -123,6 +123,13 @@ def load_pdfs(selected_regulations, additional_pdfs):
     return "PDFs loaded and RAG system updated successfully!"
 
 def process_query(user_query):
+    global rag_chain, full_pdf_content
+
+    if rag_chain is None or not full_pdf_content:
+        return ("Please load PDFs before asking questions.",
+                "Please load PDFs before asking questions.",
+                "Please load PDFs and initialize the system before asking questions.")
+
     preprocessed_query = preprocess_query(user_query)
 
     # Get RAG response using Groq
@@ -144,7 +151,7 @@ rag_chain = None
 # Gradio interface
 with gr.Blocks() as iface:
     gr.Markdown("# Data Protection Team")
-    gr.Markdown("Get responses combining advanced RAG, Long Context, and SOTA models to data protection related questions.")
+    gr.Markdown("**Get responses combining advanced RAG, Long Context, and SOTA models to data protection related questions**")
 
     with gr.Row():
         gdpr_checkbox = gr.Checkbox(label="GDPR (EU)")
@@ -156,7 +163,7 @@ with gr.Blocks() as iface:
         overflow-y: auto;
     }
     """
-    gr.Markdown("Optional: upload additional PDFs if needed (national regulation, school policy)")
+    gr.Markdown("**Optional: upload additional PDFs if needed (national regulation, school policy)**")
     additional_pdfs = gr.File(
         file_count="multiple",
         label="Upload additional PDFs",
@@ -167,11 +174,11 @@ with gr.Blocks() as iface:
     load_button = gr.Button("Load PDFs")
     load_output = gr.Textbox(label="Load Status")
 
-    gr.Markdown("Ask your data protection related question")
+    gr.Markdown("**Ask your data protection related question**")
     query_input = gr.Textbox(label="Your Question", placeholder="Ask your question here...")
     query_button = gr.Button("Submit Query")
 
-    gr.Markdown("Results")
+    gr.Markdown("**Results**")
     rag_output = gr.Textbox(label="RAG Pipeline (Llama3.1) Response")
     gemini_output = gr.Textbox(label="Long Context (Gemini 1.5 Pro) Response")
     final_output = gr.HTML(label="Final (GPT-4o) Response")
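
For context, a minimal sketch of how the new guard is presumably surfaced in the UI. The event wiring sits outside the hunks shown, so the exact call below is an assumption based on the component and function names visible above, not part of this commit:

# Sketch only: query_button's handler is not shown in this diff; the wiring
# here is assumed from the names in the hunks above.
query_button.click(
    fn=process_query,                                    # returns a 3-tuple of strings
    inputs=query_input,                                  # the "Your Question" textbox
    outputs=[rag_output, gemini_output, final_output],   # one component per tuple element
)

With wiring like this, calling process_query before load_pdfs has run now fills the three response boxes with the placeholder messages instead of failing on an uninitialized rag_chain.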