Ali2206 committed
Commit 6fe469e · verified · 1 Parent(s): e579d17

Update app.py

Files changed (1): app.py (+36, -8)
app.py CHANGED
@@ -15,6 +15,7 @@ import logging
 import torch
 import gc
 import atexit
+import signal
 from diskcache import Cache
 from transformers import AutoTokenizer
 from datetime import datetime
@@ -278,7 +279,9 @@ class ClinicalOversightApp:
         self.agent = self._initialize_agent()
         self.text_processor = TextProcessor()
         self.file_processor = FileProcessor()
-        atexit.register(self.cleanup_resources)  # Register cleanup on exit
+        atexit.register(self.cleanup_resources)
+        signal.signal(signal.SIGTERM, self._signal_handler)
+        signal.signal(signal.SIGINT, self._signal_handler)

     def _initialize_agent(self):
         """Initialize the TxAgent with proper configuration"""
@@ -306,15 +309,26 @@ class ClinicalOversightApp:
         logger.info("AI Agent Ready")
         return agent

+    def _signal_handler(self, signum, frame):
+        """Handle termination signals"""
+        logger.info(f"Received signal {signum}, cleaning up...")
+        self.cleanup_resources()
+        sys.exit(0)
+
     def cleanup_resources(self):
         """Clean up GPU memory and collect garbage"""
         logger.info("Cleaning up resources...")
         log_system_resources("Before Cleanup")
         torch.cuda.empty_cache()
         gc.collect()
-        if torch.distributed.is_initialized():
-            logger.info("Destroying PyTorch distributed process group...")
-            torch.distributed.destroy_process_group()
+        for _ in range(2):  # Retry to ensure cleanup
+            try:
+                if torch.distributed.is_initialized():
+                    logger.info("Destroying PyTorch distributed process group...")
+                    torch.distributed.destroy_process_group()
+                break
+            except Exception as e:
+                logger.error(f"Cleanup error: {e}")
         log_system_resources("After Cleanup")

     def process_response_stream(self, prompt: str, history: List[dict]) -> Generator[dict, None, None]:
@@ -486,6 +500,7 @@ Patient Record (Chunk {chunk_idx}/{len(chunks)}):
     transition: all 0.3s ease;
     background: var(--message-bg);
     position: relative;
+    animation: messageFade 0.3s ease;
 }
 .message:hover {
     transform: translateY(-2px);
@@ -610,6 +625,9 @@ Patient Record (Chunk {chunk_idx}/{len(chunks)}):
     z-index: 1000;
     animation: fadeIn 0.3s ease;
 }
+.markdown-tooltip, .file-tooltip {
+    display: block;
+}
 .loading-spinner {
     position: absolute;
     bottom: 80px;
@@ -663,6 +681,10 @@ Patient Record (Chunk {chunk_idx}/{len(chunks)}):
     50% { width: 50%; }
     100% { width: 0; }
 }
+@keyframes messageFade {
+    from { opacity: 0; transform: translateY(10px); }
+    to { opacity: 1; transform: translateY(0); }
+}
 :root {
     --background: #ffffff;
     --text-color: #333333;
@@ -766,13 +788,16 @@ Patient Record (Chunk {chunk_idx}/{len(chunks)}):
     "<div class='tooltip' data-tip='Upload patient records'>### 📎 Upload Records</div>",
     elem_classes="markdown-tooltip"
 )
+gr.HTML(
+    "<div class='file-tooltip' data-tip='Select PDF, CSV, or Excel files'>"
+)
 file_upload = gr.File(
     file_types=[".pdf", ".csv", ".xls", ".xlsx"],
     file_count="multiple",
     label="Patient Records",
-    elem_classes="tooltip",
-    title="Select PDF, CSV, or Excel files"
+    elem_classes="file-input"
 )
+gr.HTML("</div>")
 gr.Markdown(
     "<div class='tooltip' data-tip='Summary of findings'>### 📝 Analysis Summary</div>",
     elem_classes="markdown-tooltip"
@@ -785,13 +810,16 @@ Patient Record (Chunk {chunk_idx}/{len(chunks)}):
     "<div class='tooltip' data-tip='Download full report'>### 📄 Full Report</div>",
     elem_classes="markdown-tooltip"
 )
+gr.HTML(
+    "<div class='file-tooltip' data-tip='Download analysis report'>"
+)
 download_output = gr.File(
     label="Download Report",
     visible=False,
     interactive=False,
-    elem_classes="tooltip",
-    title="Download analysis report"
+    elem_classes="file-output"
 )
+gr.HTML("</div>")

 with gr.Row(elem_classes="input-container"):
     msg_input = gr.Textbox(
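Note on the shutdown changes: they pair `atexit` (normal interpreter exit) with handlers for `SIGTERM` and `SIGINT` (container stop, Ctrl-C). The handler ends in `sys.exit(0)`, which assumes `sys` is already imported elsewhere in app.py, since this hunk does not add it; that exit also fires the `atexit` callback again, so `cleanup_resources()` has to be safe to run twice. The `torch.distributed.is_initialized()` guard provides that idempotence, and the retry loop swallows transient teardown errors. A minimal, self-contained sketch of the same pattern, with module-level functions rather than the app's methods (names here are illustrative, not app.py's):

```python
import atexit
import gc
import signal
import sys

import torch


def cleanup_resources():
    """Free cached GPU memory and tear down torch.distributed if it was used."""
    torch.cuda.empty_cache()
    gc.collect()
    # destroy_process_group() raises if no default group exists, so the
    # is_initialized() guard also makes repeated calls harmless.
    if torch.distributed.is_initialized():
        torch.distributed.destroy_process_group()


def _signal_handler(signum, frame):
    cleanup_resources()
    sys.exit(0)  # raises SystemExit; atexit handlers still run afterwards


# Run cleanup on normal exit and on SIGTERM/SIGINT, mirroring the
# registrations this commit adds to __init__.
atexit.register(cleanup_resources)
signal.signal(signal.SIGTERM, _signal_handler)
signal.signal(signal.SIGINT, _signal_handler)
```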
 
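Note on the UI changes: the `title=` keyword is dropped from both `gr.File` components (it is not a documented `gr.File` parameter) in favor of CSS-class hooks: the file widgets get `file-input`/`file-output` classes and are bracketed by `gr.HTML` snippets carrying a `data-tip` attribute. The stylesheet rule that turns `data-tip` into hover text is not part of this diff, so the sketch below is only an assumption about what such a rule usually looks like (a `::after` pseudo-element reading `attr(data-tip)`), shown in a stripped-down `gr.Blocks` layout:

```python
import gradio as gr

# Hypothetical tooltip rule for the data-tip attribute used by the
# 'tooltip' / 'file-tooltip' classes. app.py's real CSS is not shown in
# this commit, so treat this rule as an illustrative assumption.
TOOLTIP_CSS = """
.tooltip { position: relative; display: inline-block; }
.tooltip:hover::after {
    content: attr(data-tip);
    position: absolute;
    bottom: 125%;
    left: 0;
    background: #333;
    color: #fff;
    padding: 4px 8px;
    border-radius: 4px;
    white-space: nowrap;
}
"""

with gr.Blocks(css=TOOLTIP_CSS) as demo:
    # Same shape as the existing markdown-tooltip usage in app.py:
    # a div with a data-tip attribute inside a single component.
    gr.Markdown(
        "<div class='tooltip' data-tip='Upload patient records'>### 📎 Upload Records</div>"
    )
    file_upload = gr.File(
        file_types=[".pdf", ".csv", ".xls", ".xlsx"],
        file_count="multiple",
        label="Patient Records",
        elem_classes="file-input",
    )

if __name__ == "__main__":
    demo.launch()
```

The sketch attaches the tooltip to the `gr.Markdown` heading, the pattern app.py already uses for `markdown-tooltip`, rather than to the bare `gr.File` widget.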