Update app.py
app.py CHANGED
@@ -197,7 +197,8 @@ async def process_final_report(agent, file, chatbot_state: List[Dict[str, str]])
 
     if file is None or not hasattr(file, "name"):
         messages.append({"role": "assistant", "content": "❌ Please upload a valid Excel file before analyzing."})
-
+        yield messages, report_path
+        return
 
     try:
         messages.append({"role": "user", "content": f"Processing Excel file: {os.path.basename(file.name)}"})
@@ -233,7 +234,8 @@ async def process_final_report(agent, file, chatbot_state: List[Dict[str, str]])
         chunk_responses = [r for r in chunk_responses if r]
         if not chunk_responses:
             messages.append({"role": "assistant", "content": "❌ No valid chunk responses to summarize."})
-
+            yield messages, report_path
+            return
 
         # Summarize chunk responses incrementally
         summary = ""
@@ -265,7 +267,8 @@ async def process_final_report(agent, file, chatbot_state: List[Dict[str, str]])
                 current_summary_tokens = estimate_tokens(summary)
             except Exception as e:
                 messages.append({"role": "assistant", "content": f"❌ Error summarizing intermediate results: {str(e)}"})
-
+                yield messages, report_path
+                return
 
             summary += f"\n\n### Chunk {i+1} Analysis\n{response}"
             current_summary_tokens += response_tokens
@@ -295,7 +298,8 @@ async def process_final_report(agent, file, chatbot_state: List[Dict[str, str]])
                 final_report_text += r.content
         except Exception as e:
             messages.append({"role": "assistant", "content": f"❌ Error generating final report: {str(e)}"})
-
+            yield messages, report_path
+            return
 
         final_report = f"# \U0001f9e0 Final Patient Report\n\n{clean_response(final_report_text)}"
         messages[-1]["content"] = f"📄 Final Report:\n\n{clean_response(final_report_text)}"
@@ -310,11 +314,13 @@ async def process_final_report(agent, file, chatbot_state: List[Dict[str, str]])
         messages.append({"role": "assistant", "content": f"✅ Report generated and saved: report_{timestamp}.md"})
         logger.info(f"Total processing time: {time.time() - start_time:.2f} seconds")
 
+        yield messages, report_path
+
     except Exception as e:
         messages.append({"role": "assistant", "content": f"❌ Error processing file: {str(e)}"})
         logger.error(f"Processing failed: {str(e)}")
-
-
+        yield messages, report_path
+        return
 
 async def create_ui(agent):
     """Create the Gradio UI for the patient history analysis tool."""