Ali2206 committed on
Commit
9086c95
·
verified ·
1 Parent(s): c44bafb

Update ui/ui_core.py

Browse files
Files changed (1) hide show
  1. ui/ui_core.py +33 -25
ui/ui_core.py CHANGED
@@ -3,7 +3,6 @@ import os
3
  import pandas as pd
4
  import pdfplumber
5
  import gradio as gr
6
- import re
7
  from typing import List
8
 
9
  # ✅ Fix: Add src to Python path
@@ -14,8 +13,8 @@ from txagent.txagent import TxAgent
14
  def sanitize_utf8(text: str) -> str:
15
  return text.encode("utf-8", "ignore").decode("utf-8")
16
 
17
- def clean_final_response(response: str) -> str:
18
- return re.split(r"\\[TOOL_CALLS\\]", response)[0].strip()
19
 
20
  def extract_all_text_from_csv_or_excel(file_path: str, progress=None, index=0, total=1) -> str:
21
  try:
@@ -37,7 +36,7 @@ def extract_all_text_from_csv_or_excel(file_path: str, progress=None, index=0, t
37
  line = " | ".join(str(cell) for cell in row if pd.notna(cell))
38
  if line:
39
  lines.append(line)
40
- return f"πŸ“„ {os.path.basename(file_path)}\n\n" + "\n".join(lines)
41
 
42
  except Exception as e:
43
  return f"[Error reading {os.path.basename(file_path)}]: {str(e)}"
@@ -58,7 +57,7 @@ def extract_all_text_from_pdf(file_path: str, progress=None, index=0, total=1) -
58
  progress((index + (i / num_pages)) / total, desc=f"Reading PDF: {os.path.basename(file_path)} ({i+1}/{num_pages})")
59
  except Exception as e:
60
  extracted.append(f"[Error reading page {i+1}]: {str(e)}")
61
- return f"πŸ“„ {os.path.basename(file_path)}\n\n" + "\n\n".join(extracted)
62
 
63
  except Exception as e:
64
  return f"[Error reading PDF {os.path.basename(file_path)}]: {str(e)}"
@@ -82,9 +81,9 @@ def chunk_text(text: str, max_tokens: int = 8192) -> List[str]:
82
 
83
  def create_ui(agent: TxAgent):
84
  with gr.Blocks(theme=gr.themes.Soft()) as demo:
85
- gr.Markdown("<h1 style='text-align: center;'>πŸ“‹ CPS: Clinical Patient Support System</h1>")
86
 
87
- chatbot = gr.Chatbot(label="CPS Assistant", height=600, type="messages", show_copy_button=True)
88
  file_upload = gr.File(
89
  label="Upload Medical File",
90
  file_types=[".pdf", ".txt", ".docx", ".jpg", ".png", ".csv", ".xls", ".xlsx"],
@@ -105,7 +104,8 @@ def create_ui(agent: TxAgent):
105
  )
106
 
107
  try:
108
- yield history + [{"role": "assistant", "content": "⏳ **Processing... Please wait while I analyze the files.**"}]
 
109
 
110
  extracted_text = ""
111
  if uploaded_files and isinstance(uploaded_files, list):
@@ -127,13 +127,15 @@ def create_ui(agent: TxAgent):
127
  sanitized = sanitize_utf8(extracted_text.strip())
128
  chunks = chunk_text(sanitized)
129
 
130
- all_responses = ""
131
  for i, chunk in enumerate(chunks):
132
- full_message = (
133
- f"{context}\n\n--- Uploaded File Chunk {i+1}/{len(chunks)} ---\n\n{chunk}\n\n--- End of Chunk ---\n\nNow begin your reasoning:"
 
134
  )
 
135
  generator = agent.run_gradio_chat(
136
- message=full_message,
137
  history=[],
138
  temperature=0.3,
139
  max_new_tokens=1024,
@@ -143,24 +145,30 @@ def create_ui(agent: TxAgent):
143
  uploaded_files=uploaded_files,
144
  max_round=30
145
  )
 
 
146
  for update in generator:
147
  if isinstance(update, str):
148
- all_responses += update + "\n"
 
 
 
 
 
 
149
 
150
- all_responses = clean_final_response(sanitize_utf8(all_responses.strip()))
151
- final_history = history + [
152
- {"role": "user", "content": message},
153
- {"role": "assistant", "content": all_responses}
154
- ]
155
- yield final_history
156
 
157
  except Exception as chat_error:
158
- print(f"Chat error: {chat_error}")
159
- final_history = history + [
160
- {"role": "user", "content": message},
161
- {"role": "assistant", "content": "❌ An error occurred while processing your request."}
162
- ]
163
- yield final_history
 
164
 
165
  inputs = [message_input, chatbot, conversation_state, file_upload]
166
  send_button.click(fn=handle_chat, inputs=inputs, outputs=chatbot)
 
3
  import pandas as pd
4
  import pdfplumber
5
  import gradio as gr
 
6
  from typing import List
7
 
8
  # ✅ Fix: Add src to Python path
 
13
  def sanitize_utf8(text: str) -> str:
14
  return text.encode("utf-8", "ignore").decode("utf-8")
15
 
16
+ def clean_final_response(text: str) -> str:
17
+ return text.replace("[TOOL_CALLS]", "").strip()
18
 
19
  def extract_all_text_from_csv_or_excel(file_path: str, progress=None, index=0, total=1) -> str:
20
  try:
 
36
  line = " | ".join(str(cell) for cell in row if pd.notna(cell))
37
  if line:
38
  lines.append(line)
39
+ return f"\U0001F4C4 {os.path.basename(file_path)}\n\n" + "\n".join(lines)
40
 
41
  except Exception as e:
42
  return f"[Error reading {os.path.basename(file_path)}]: {str(e)}"
 
57
  progress((index + (i / num_pages)) / total, desc=f"Reading PDF: {os.path.basename(file_path)} ({i+1}/{num_pages})")
58
  except Exception as e:
59
  extracted.append(f"[Error reading page {i+1}]: {str(e)}")
60
+ return f"\U0001F4C4 {os.path.basename(file_path)}\n\n" + "\n\n".join(extracted)
61
 
62
  except Exception as e:
63
  return f"[Error reading PDF {os.path.basename(file_path)}]: {str(e)}"
 
81
 
82
  def create_ui(agent: TxAgent):
83
  with gr.Blocks(theme=gr.themes.Soft()) as demo:
84
+ gr.Markdown("<h1 style='text-align: center;'>\U0001F4CB CPS: Clinical Patient Support System</h1>")
85
 
86
+ chatbot = gr.Chatbot(label="CPS Assistant", height=600, type="text")
87
  file_upload = gr.File(
88
  label="Upload Medical File",
89
  file_types=[".pdf", ".txt", ".docx", ".jpg", ".png", ".csv", ".xls", ".xlsx"],
 
104
  )
105
 
106
  try:
107
+ history.append((message, "⏳ Processing your request..."))
108
+ yield history
109
 
110
  extracted_text = ""
111
  if uploaded_files and isinstance(uploaded_files, list):
 
127
  sanitized = sanitize_utf8(extracted_text.strip())
128
  chunks = chunk_text(sanitized)
129
 
130
+ full_response = ""
131
  for i, chunk in enumerate(chunks):
132
+ chunked_prompt = (
133
+ f"{context}\n\n--- Uploaded File Content (Chunk {i+1}/{len(chunks)}) ---\n\n{chunk}\n\n"
134
+ f"--- End of Chunk ---\n\nNow begin your analysis:"
135
  )
136
+
137
  generator = agent.run_gradio_chat(
138
+ message=chunked_prompt,
139
  history=[],
140
  temperature=0.3,
141
  max_new_tokens=1024,
 
145
  uploaded_files=uploaded_files,
146
  max_round=30
147
  )
148
+
149
+ chunk_response = ""
150
  for update in generator:
151
  if isinstance(update, str):
152
+ chunk_response += update
153
+ elif isinstance(update, list):
154
+ for msg in update:
155
+ if hasattr(msg, 'content'):
156
+ chunk_response += msg.content
157
+
158
+ full_response += chunk_response + "\n\n"
159
 
160
+ full_response = clean_final_response(full_response.strip())
161
+ history[-1] = (message, full_response)
162
+ yield history
 
 
 
163
 
164
  except Exception as chat_error:
165
+ print(f"Chat handling error: {chat_error}")
166
+ error_msg = "An error occurred while processing your request. Please try again."
167
+ if len(history) > 0 and history[-1][1].startswith("⏳"):
168
+ history[-1] = (history[-1][0], error_msg)
169
+ else:
170
+ history.append((message, error_msg))
171
+ yield history
172
 
173
  inputs = [message_input, chatbot, conversation_state, file_upload]
174
  send_button.click(fn=handle_chat, inputs=inputs, outputs=chatbot)