Ali2206 committed on
Commit
9b25f67
·
verified ·
1 Parent(s): 34a404f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +25 -8
app.py CHANGED
@@ -11,14 +11,13 @@ import shutil
11
  import time
12
  from functools import lru_cache
13
 
 
14
  current_dir = os.path.dirname(os.path.abspath(__file__))
15
-
16
- # ✅ Add src to Python path
17
  src_path = os.path.abspath(os.path.join(current_dir, "src"))
18
  print(f"Adding to path: {src_path}")
19
  sys.path.insert(0, src_path)
20
 
21
- # Configure Hugging Face and cache dirs
22
  base_dir = "/data"
23
  model_cache_dir = os.path.join(base_dir, "txagent_models")
24
  tool_cache_dir = os.path.join(base_dir, "tool_cache")
@@ -28,14 +27,13 @@ os.makedirs(model_cache_dir, exist_ok=True)
28
  os.makedirs(tool_cache_dir, exist_ok=True)
29
  os.makedirs(file_cache_dir, exist_ok=True)
30
 
31
- os.environ["HF_HOME"] = model_cache_dir
32
  os.environ["TRANSFORMERS_CACHE"] = model_cache_dir
 
33
  os.environ["TOKENIZERS_PARALLELISM"] = "false"
34
  os.environ["CUDA_LAUNCH_BLOCKING"] = "1"
35
 
36
  from txagent.txagent import TxAgent
37
 
38
- # ✅ Utils
39
def sanitize_utf8(text: str) -> str:
    """Return *text* cleaned of any byte sequences that cannot be encoded as UTF-8.

    Characters that are invalid in UTF-8 (e.g. lone surrogates) are silently
    dropped via the "ignore" error handler; valid text passes through unchanged.
    """
    cleaned = text.encode("utf-8", errors="ignore")
    return cleaned.decode("utf-8")
41
 
@@ -56,8 +54,7 @@ def convert_file_to_json(file_path: str, file_type: str) -> str:
56
  return open(cache_path, "r", encoding="utf-8").read()
57
 
58
  if file_type == "csv":
59
- df = pd.read_csv(file_path, encoding_errors="replace", header=None,
60
- dtype=str, skip_blank_lines=False, on_bad_lines="skip")
61
  elif file_type in ["xls", "xlsx"]:
62
  try:
63
  df = pd.read_excel(file_path, engine="openpyxl", header=None, dtype=str)
@@ -143,9 +140,11 @@ def create_ui(agent: TxAgent):
143
  history.append({"role": "assistant", "content": "⏳ Processing your request..."})
144
  yield history
145
 
 
146
  extracted_text = ""
147
  if uploaded_files and isinstance(uploaded_files, list):
148
  extracted_text = convert_files_to_json_parallel(uploaded_files)
 
149
 
150
  context = (
151
  "You are an expert clinical AI assistant. Review this patient's history, "
@@ -154,6 +153,7 @@ def create_ui(agent: TxAgent):
154
  )
155
  chunked_prompt = f"{context}\n\n--- Patient Record ---\n{extracted_text}\n\n[Final Analysis]"
156
 
 
157
  generator = agent.run_gradio_chat(
158
  message=chunked_prompt,
159
  history=[],
@@ -180,6 +180,7 @@ def create_ui(agent: TxAgent):
180
  yield history
181
 
182
  history[-1] = {"role": "assistant", "content": "".join(final_response).strip() or "❌ No response."}
 
183
  yield history
184
 
185
  except Exception as chat_error:
@@ -205,9 +206,25 @@ if __name__ == "__main__":
205
  print("Initializing agent...")
206
  agent = init_agent()
207
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
208
  print("Launching interface...")
209
  demo = create_ui(agent)
210
- demo.queue(concurrency_count=3).launch(
211
  server_name="0.0.0.0",
212
  server_port=7860,
213
  show_error=True,
 
11
  import time
12
  from functools import lru_cache
13
 
14
+ # Environment and path setup
15
  current_dir = os.path.dirname(os.path.abspath(__file__))
 
 
16
  src_path = os.path.abspath(os.path.join(current_dir, "src"))
17
  print(f"Adding to path: {src_path}")
18
  sys.path.insert(0, src_path)
19
 
20
+ # Configure cache directories
21
  base_dir = "/data"
22
  model_cache_dir = os.path.join(base_dir, "txagent_models")
23
  tool_cache_dir = os.path.join(base_dir, "tool_cache")
 
27
  os.makedirs(tool_cache_dir, exist_ok=True)
28
  os.makedirs(file_cache_dir, exist_ok=True)
29
 
 
30
  os.environ["TRANSFORMERS_CACHE"] = model_cache_dir
31
+ os.environ["HF_HOME"] = model_cache_dir
32
  os.environ["TOKENIZERS_PARALLELISM"] = "false"
33
  os.environ["CUDA_LAUNCH_BLOCKING"] = "1"
34
 
35
  from txagent.txagent import TxAgent
36
 
 
37
def sanitize_utf8(text: str) -> str:
    """Strip characters from *text* that have no valid UTF-8 encoding.

    Round-trips the string through a UTF-8 encode with errors="ignore",
    so un-encodable characters (such as unpaired surrogates) are removed
    while ordinary text is returned unchanged.
    """
    return text.encode("utf-8", "ignore").decode("utf-8", "ignore")
39
 
 
54
  return open(cache_path, "r", encoding="utf-8").read()
55
 
56
  if file_type == "csv":
57
+ df = pd.read_csv(file_path, encoding_errors="replace", header=None, dtype=str, skip_blank_lines=False, on_bad_lines="skip")
 
58
  elif file_type in ["xls", "xlsx"]:
59
  try:
60
  df = pd.read_excel(file_path, engine="openpyxl", header=None, dtype=str)
 
140
  history.append({"role": "assistant", "content": "⏳ Processing your request..."})
141
  yield history
142
 
143
+ file_process_time = time.time()
144
  extracted_text = ""
145
  if uploaded_files and isinstance(uploaded_files, list):
146
  extracted_text = convert_files_to_json_parallel(uploaded_files)
147
+ print(f"File processing took: {time.time() - file_process_time:.2f}s")
148
 
149
  context = (
150
  "You are an expert clinical AI assistant. Review this patient's history, "
 
153
  )
154
  chunked_prompt = f"{context}\n\n--- Patient Record ---\n{extracted_text}\n\n[Final Analysis]"
155
 
156
+ model_start = time.time()
157
  generator = agent.run_gradio_chat(
158
  message=chunked_prompt,
159
  history=[],
 
180
  yield history
181
 
182
  history[-1] = {"role": "assistant", "content": "".join(final_response).strip() or "❌ No response."}
183
+ print(f"Model processing took: {time.time() - model_start:.2f}s")
184
  yield history
185
 
186
  except Exception as chat_error:
 
206
  print("Initializing agent...")
207
  agent = init_agent()
208
 
209
+ print("Performing warm-up call...")
210
+ try:
211
+ warm_up = agent.run_gradio_chat(
212
+ message="Warm up",
213
+ history=[],
214
+ temperature=0.1,
215
+ max_new_tokens=10,
216
+ max_token=100,
217
+ call_agent=False,
218
+ conversation=[]
219
+ )
220
+ for _ in warm_up:
221
+ pass
222
+ except:
223
+ pass
224
+
225
  print("Launching interface...")
226
  demo = create_ui(agent)
227
+ demo.queue().launch(
228
  server_name="0.0.0.0",
229
  server_port=7860,
230
  show_error=True,