Ali2206 committed (verified)
Commit 6cafd98 · Parent(s): a541de1

Update app.py

Files changed (1): app.py (+23, -24)
app.py CHANGED
@@ -1,4 +1,8 @@
-
+import sys
+import os
+import pandas as pd
+import pdfplumber
+import json
 import gradio as gr
 from typing import List
 from concurrent.futures import ThreadPoolExecutor, as_completed
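The first hunk adds sys, os, pandas, pdfplumber and json to the imports, which suggests that code outside this diff reads uploaded spreadsheets and PDF medical records before building the prompt. As a rough illustration only, assuming a hypothetical extract_text helper rather than the actual parsing code in app.py, the text of one upload could be pulled out like this:

import pandas as pd
import pdfplumber

def extract_text(path: str) -> str:
    # Hypothetical helper, not the code in app.py: return the readable text of one upload.
    lower = path.lower()
    if lower.endswith(".pdf"):
        with pdfplumber.open(path) as pdf:
            # Pages without a text layer return None; substitute an empty string.
            return "\n".join(page.extract_text() or "" for page in pdf.pages)
    if lower.endswith((".xls", ".xlsx")):
        # Flatten spreadsheet rows into a JSON-records string for the prompt.
        return pd.read_excel(path).to_json(orient="records")
    if lower.endswith(".csv"):
        return pd.read_csv(path).to_json(orient="records")
    with open(path, "r", encoding="utf-8", errors="ignore") as f:
        return f.read()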
@@ -161,41 +165,36 @@ Medical Records:
 
 ### Potential Oversights:
 """
-        response_chunks = []
+        response = ""
         try:
             for chunk in agent.run_gradio_chat(
                 message=prompt,
                 history=[],
                 temperature=0.2,
-                max_new_tokens=1024,
+                max_new_tokens=2048,
                 max_token=4096,
                 call_agent=False,
-                conversation=[]
+                conversation=[],
             ):
-                if not chunk:
+                if chunk is None:
                     continue
                 if isinstance(chunk, str):
-                    response_chunks.append(chunk)
+                    response += chunk
                 elif isinstance(chunk, list):
-                    response_chunks.extend([c.content for c in chunk if hasattr(c, 'content')])
-                partial_response = "".join(response_chunks)
-                cleaned_partial = partial_response.split("[TOOL_CALLS]")[0].strip()
-                if cleaned_partial:
-                    history[-1] = {"role": "assistant", "content": cleaned_partial}
-                    yield history, None
-        except Exception as e:
-            history[-1] = {"role": "assistant", "content": f"❌ Error: {str(e)}"}
-            yield history, None
-            return
+                    response += "".join([c.content for c in chunk if hasattr(c, 'content') and c.content])
 
-        full_response = "".join(response_chunks)
-        final_output = full_response.split("[TOOL_CALLS]")[0].strip()
-        if not final_output:
-            final_output = "No clear oversights identified. Recommend comprehensive review."
-        history[-1] = {"role": "assistant", "content": final_output}
+            cleaned = response.split("[TOOL_CALLS]")[0].strip()
+            if not cleaned:
+                cleaned = "No clear oversights identified. Recommend comprehensive review."
 
-        report_path = os.path.join(report_dir, f"{file_hash_value}_report.txt") if file_hash_value else None
-        yield history, report_path if report_path and os.path.exists(report_path) else None
+            history[-1] = {"role": "assistant", "content": cleaned}
+            report_path = os.path.join(report_dir, f"{file_hash_value}_report.txt") if file_hash_value else None
+            yield history, report_path if report_path and os.path.exists(report_path) else None
+
+        except Exception as e:
+            print("🚨 ERROR:", e)
+            history[-1] = {"role": "assistant", "content": f"❌ Error occurred: {str(e)}"}
+            yield history, None
 
     send_btn.click(analyze, inputs=[msg_input, gr.State([]), file_upload], outputs=[chatbot, download_output])
     msg_input.submit(analyze, inputs=[msg_input, gr.State([]), file_upload], outputs=[chatbot, download_output])
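The second hunk reworks the streaming loop: chunks from agent.run_gradio_chat arrive either as plain strings or as lists of message objects carrying a .content attribute, so the new code concatenates both shapes into one response string, drops everything after the [TOOL_CALLS] marker (internal tool traffic), fills the last assistant turn, and yields (history, report_path) so the Chatbot and the download gr.File update together; the except block now wraps the whole flow and yields the error into the chat instead of returning early. Below is a minimal, self-contained sketch of that pattern, using a stand-in DummyAgent and assuming a recent Gradio with a messages-format Chatbot; the real agent, report_dir and file_hash_value live elsewhere in app.py:

import gradio as gr

class DummyAgent:
    # Stand-in for the real agent: yields both chunk shapes the loop must handle.
    class _Msg:
        def __init__(self, content):
            self.content = content

    def run_gradio_chat(self, message, **kwargs):
        yield "Reviewing records... "
        yield [self._Msg("no conflicting medications found."),
               self._Msg(" [TOOL_CALLS] internal tool payload")]

agent = DummyAgent()

def analyze(message, history, files):
    # history arrives from gr.State([]); add the user turn and an empty assistant turn.
    history = history + [{"role": "user", "content": message},
                         {"role": "assistant", "content": ""}]
    response = ""
    try:
        for chunk in agent.run_gradio_chat(message=message, history=[], temperature=0.2):
            if chunk is None:
                continue
            if isinstance(chunk, str):
                response += chunk
            elif isinstance(chunk, list):
                # Keep only objects that actually carry non-empty content.
                response += "".join(c.content for c in chunk if getattr(c, "content", None))
        # Everything after [TOOL_CALLS] is tool-call payload, not text for the user.
        cleaned = response.split("[TOOL_CALLS]")[0].strip()
        history[-1]["content"] = cleaned or "No clear oversights identified."
        yield history, None          # second output would be the report file path
    except Exception as e:
        history[-1]["content"] = f"❌ Error occurred: {e}"
        yield history, None

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(type="messages")
    msg_input = gr.Textbox(label="Question")
    file_upload = gr.File(file_count="multiple")
    download_output = gr.File(label="Report")
    send_btn = gr.Button("Send")
    # Generator handlers stream: each yield refreshes [chatbot, download_output].
    send_btn.click(analyze, inputs=[msg_input, gr.State([]), file_upload],
                   outputs=[chatbot, download_output])

if __name__ == "__main__":
    demo.launch()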
@@ -211,4 +210,4 @@ if __name__ == "__main__":
         show_error=True,
         allowed_paths=[report_dir],
         share=False
-    )
+    )
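The second hunk names the report as {file_hash_value}_report.txt under report_dir, and the launch call above whitelists that directory via allowed_paths so the gr.File download component is permitted to serve it. Both file_hash_value and report_dir are defined outside this diff; a small sketch of how such a hash-keyed report path is typically produced, with hypothetical helper names rather than the exact code in app.py:

import hashlib
import os

report_dir = os.path.join(os.getcwd(), "reports")   # assumption: any writable directory
os.makedirs(report_dir, exist_ok=True)

def file_hash(path: str) -> str:
    # Hash the upload's bytes so each distinct file maps to a stable report name.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(8192), b""):
            h.update(block)
    return h.hexdigest()[:16]

def report_path_for(path: str) -> str:
    return os.path.join(report_dir, f"{file_hash(path)}_report.txt")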
 
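Finally, the ThreadPoolExecutor / as_completed import kept in the first hunk hints that multiple uploads are parsed concurrently before the prompt is assembled. A sketch of that pattern, again an illustration under assumed helper names rather than the code in app.py:

from concurrent.futures import ThreadPoolExecutor, as_completed

def read_upload(path: str) -> str:
    # Placeholder parser; in practice this is where pdfplumber/pandas extraction would run.
    with open(path, "r", encoding="utf-8", errors="ignore") as f:
        return f.read()

def extract_all(paths, max_workers=4):
    # Parse uploads concurrently; results are keyed by path so the joined text
    # keeps the order in which the files were supplied.
    texts = {}
    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        futures = {pool.submit(read_upload, p): p for p in paths}
        for fut in as_completed(futures):
            path = futures[fut]
            try:
                texts[path] = fut.result()
            except Exception as exc:
                texts[path] = f"[failed to parse {path}: {exc}]"
    return "\n\n".join(texts[p] for p in paths)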