Ali2206 committed on
Commit
bced27d
·
verified ·
1 Parent(s): 9dd64ff

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -7
app.py CHANGED
@@ -50,11 +50,7 @@ os.environ.update({
50
  "TOKENIZERS_PARALLELISM": "false",
51
  "CUDA_LAUNCH_BLOCKING": "1"
52
  })
53
- current_dir = os.path.dirname(os.path.abspath(__file__))
54
- src_path = os.path.abspath(os.path.join(current_dir, "src"))
55
- sys.path.insert(0, src_path)
56
 
57
- from txagent.txagent import TxAgent
58
  # Initialize cache with 10GB limit
59
  cache = Cache(file_cache_dir, size_limit=10 * 1024**3)
60
 
@@ -332,7 +328,7 @@ Patient Record Excerpt (Chunk {0} of {1}):
332
 
333
  with gr.Row():
334
  with gr.Column(scale=3):
335
- chatbot = gr.Chatbot(label="Detailed Analysis", height=600)
336
  msg_input = gr.Textbox(placeholder="Ask about potential oversights...", show_label=False)
337
  send_btn = gr.Button("Analyze", variant="primary")
338
  file_upload = gr.File(file_types=[".pdf", ".csv", ".xls", ".xlsx"], file_count="multiple")
@@ -483,8 +479,7 @@ if __name__ == "__main__":
483
  demo = create_ui(agent)
484
  demo.queue(
485
  api_open=False,
486
- max_size=20,
487
- concurrency_count=4
488
  ).launch(
489
  server_name="0.0.0.0",
490
  server_port=7860,
 
50
  "TOKENIZERS_PARALLELISM": "false",
51
  "CUDA_LAUNCH_BLOCKING": "1"
52
  })
 
 
 
53
 
 
54
  # Initialize cache with 10GB limit
55
  cache = Cache(file_cache_dir, size_limit=10 * 1024**3)
56
 
 
328
 
329
  with gr.Row():
330
  with gr.Column(scale=3):
331
+ chatbot = gr.Chatbot(label="Detailed Analysis", height=600, type="messages")
332
  msg_input = gr.Textbox(placeholder="Ask about potential oversights...", show_label=False)
333
  send_btn = gr.Button("Analyze", variant="primary")
334
  file_upload = gr.File(file_types=[".pdf", ".csv", ".xls", ".xlsx"], file_count="multiple")
 
479
  demo = create_ui(agent)
480
  demo.queue(
481
  api_open=False,
482
+ max_size=20
 
483
  ).launch(
484
  server_name="0.0.0.0",
485
  server_port=7860,