Update app.py
app.py CHANGED
@@ -14,6 +14,7 @@ import subprocess
 import logging
 import torch
 import gc
+import atexit
 from diskcache import Cache
 from transformers import AutoTokenizer
 from datetime import datetime
@@ -277,6 +278,7 @@ class ClinicalOversightApp:
         self.agent = self._initialize_agent()
         self.text_processor = TextProcessor()
         self.file_processor = FileProcessor()
+        atexit.register(self.cleanup_resources)  # Register cleanup on exit

     def _initialize_agent(self):
         """Initialize the TxAgent with proper configuration"""
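The atexit.register call above ties resource cleanup to interpreter shutdown. A minimal standalone sketch of that pattern (the class below is illustrative, not the real ClinicalOversightApp):

    import atexit
    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    class DemoApp:
        def __init__(self):
            # atexit calls the bound method once at normal interpreter exit.
            atexit.register(self.cleanup_resources)

        def cleanup_resources(self):
            logger.info("Cleaning up resources...")

    app = DemoApp()
    # When the script ends (or sys.exit() is called), "Cleaning up resources..." is logged.

Note that atexit handlers run on normal shutdown but not when the process is killed with SIGKILL.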
@@ -307,11 +309,13 @@ class ClinicalOversightApp:
     def cleanup_resources(self):
         """Clean up GPU memory and collect garbage"""
         logger.info("Cleaning up resources...")
+        log_system_resources("Before Cleanup")
         torch.cuda.empty_cache()
         gc.collect()
         if torch.distributed.is_initialized():
             logger.info("Destroying PyTorch distributed process group...")
             torch.distributed.destroy_process_group()
+        log_system_resources("After Cleanup")

     def process_response_stream(self, prompt: str, history: List[dict]) -> Generator[dict, None, None]:
         """Stream the agent's response with proper formatting"""
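This hunk brackets the cleanup with log_system_resources() calls that are defined elsewhere in app.py. A rough standalone equivalent, with a hypothetical psutil-based stand-in for that helper:

    import gc
    import logging

    import psutil
    import torch

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    def log_system_resources(tag: str) -> None:
        # Hypothetical stand-in for the helper defined elsewhere in app.py.
        vm = psutil.virtual_memory()
        logger.info("[%s] RAM used: %.1f%%", tag, vm.percent)

    def cleanup_resources() -> None:
        logger.info("Cleaning up resources...")
        log_system_resources("Before Cleanup")
        if torch.cuda.is_available():
            # Release cached GPU blocks back to the driver.
            torch.cuda.empty_cache()
        gc.collect()
        # Tear down the process group only if one was actually created.
        if torch.distributed.is_available() and torch.distributed.is_initialized():
            torch.distributed.destroy_process_group()
        log_system_resources("After Cleanup")

The extra is_available() guards are not in the diff itself; they only keep the sketch safe on CPU-only machines.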
@@ -592,7 +596,7 @@ Patient Record (Chunk {chunk_idx}/{len(chunks)}):
     position: relative;
 }
 .tooltip:hover::after {
-    content: attr(data-
+    content: attr(data-tip);
     position: absolute;
     bottom: 100%;
     left: 50%;
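The new CSS line pulls the tooltip text from a data-tip attribute via the ::after pseudo-element. A self-contained Gradio example of that pattern might look like this (the styling values are illustrative, not copied from app.py):

    import gradio as gr

    TOOLTIP_CSS = """
    .tooltip { position: relative; }
    .tooltip:hover::after {
        content: attr(data-tip);  /* tooltip text comes from the data-tip attribute */
        position: absolute;
        bottom: 100%;
        left: 50%;
        transform: translateX(-50%);
        background: #333;
        color: #fff;
        padding: 4px 8px;
        border-radius: 4px;
        white-space: nowrap;
    }
    """

    with gr.Blocks(css=TOOLTIP_CSS) as demo:
        gr.HTML("<div class='tooltip' data-tip='Shown on hover'>Hover over me</div>")

    demo.launch()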
@@ -758,27 +762,35 @@ Patient Record (Chunk {chunk_idx}/{len(chunks)}):
             tools_button = gr.Button("π Tools", variant="secondary")

         with gr.Column(elem_classes="sidebar"):
-            gr.Markdown(
+            gr.Markdown(
+                "<div class='tooltip' data-tip='Upload patient records'>### π Upload Records</div>",
+                elem_classes="markdown-tooltip"
+            )
             file_upload = gr.File(
                 file_types=[".pdf", ".csv", ".xls", ".xlsx"],
                 file_count="multiple",
                 label="Patient Records",
                 elem_classes="tooltip",
-
+                title="Select PDF, CSV, or Excel files"
+            )
+            gr.Markdown(
+                "<div class='tooltip' data-tip='Summary of findings'>### π Analysis Summary</div>",
+                elem_classes="markdown-tooltip"
             )
-            gr.Markdown("### π Analysis Summary", elem_classes="tooltip", data_tooltip="Summary of findings")
             final_summary = gr.Markdown(
-                "Analysis results will appear here
-                elem_classes="tooltip"
-
+                "<div class='tooltip' data-tip='View analysis results'>Analysis results will appear here...</div>",
+                elem_classes="markdown-tooltip"
+            )
+            gr.Markdown(
+                "<div class='tooltip' data-tip='Download full report'>### π Full Report</div>",
+                elem_classes="markdown-tooltip"
             )
-            gr.Markdown("### π Full Report", elem_classes="tooltip", data_tooltip="Download full report")
             download_output = gr.File(
                 label="Download Report",
                 visible=False,
                 interactive=False,
                 elem_classes="tooltip",
-
+                title="Download analysis report"
             )

         with gr.Row(elem_classes="input-container"):
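The sidebar headings rely on elem_classes to expose CSS hooks ("tooltip" / "markdown-tooltip") on the rendered components. A tiny sketch of how elem_classes pairs with custom CSS passed to gr.Blocks (the class name and rule here are illustrative):

    import gradio as gr

    CSS = ".markdown-tooltip { border-left: 3px solid #888; padding-left: 8px; }"

    with gr.Blocks(css=CSS) as demo:
        # elem_classes adds the class to the component's wrapper so the css= rules can target it.
        gr.Markdown("### Analysis Summary", elem_classes="markdown-tooltip")

    demo.launch()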
@@ -879,7 +891,8 @@ Patient Record (Chunk {chunk_idx}/{len(chunks)}):

         app.load(
             fn=lambda: [
-                [], None, "
+                [], None, "<div class='tooltip' data-tip='View analysis results'>Analysis results will appear here...</div>",
+                "", None, {"visible": False}, "light", False, "π Dark Mode"
             ],
             outputs=[chatbot, download_output, final_summary, msg_input, file_upload, progress_text, theme_state, sidebar_state, theme_button],
             queue=False
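app.load() runs its function each time the page is loaded in the browser and writes the returned values to outputs in order, which is what resets the UI here. A smaller sketch of the same reset-on-load pattern, using placeholder components rather than the app's full output list:

    import gradio as gr

    with gr.Blocks() as app:
        chatbot = gr.Chatbot()
        final_summary = gr.Markdown()
        msg_input = gr.Textbox(label="Message")

        # The lambda's return values are assigned to outputs positionally on every page load.
        app.load(
            fn=lambda: [[], "Analysis results will appear here...", ""],
            outputs=[chatbot, final_summary, msg_input],
            queue=False,
        )

    app.launch()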