import sys
import os
import pdfplumber
import json
import gradio as gr
from typing import List
from concurrent.futures import ThreadPoolExecutor, as_completed
import hashlib
import re
import psutil
import subprocess
# Persistent directory
persistent_dir = "/data/hf_cache"
os.makedirs(persistent_dir, exist_ok=True)
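# /data/hf_cache assumes a mounted persistent volume (as on a Hugging Face
# Space with persistent storage). Running elsewhere, pointing this at any
# writable directory is enough; a hedged fallback could look like:
#     persistent_dir = os.environ.get("APP_CACHE_DIR", "/data/hf_cache")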
model_cache_dir = os.path.join(persistent_dir, "txagent_models")
file_cache_dir = os.path.join(persistent_dir, "cache")
report_dir = os.path.join(persistent_dir, "reports")
vllm_cache_dir = os.path.join(persistent_dir, "vllm_cache")
for directory in [model_cache_dir, file_cache_dir, report_dir, vllm_cache_dir]:
    os.makedirs(directory, exist_ok=True)
os.environ["HF_HOME"] = model_cache_dir
os.environ["TRANSFORMERS_CACHE"] = model_cache_dir
os.environ["VLLM_CACHE_DIR"] = vllm_cache_dir
os.environ["TOKENIZERS_PARALLELISM"] = "false"
os.environ["CUDA_LAUNCH_BLOCKING"] = "1"
current_dir = os.path.dirname(os.path.abspath(__file__))
src_path = os.path.abspath(os.path.join(current_dir, "src"))
sys.path.insert(0, src_path)
from txagent.txagent import TxAgent
def sanitize_utf8(text: str) -> str:
    """Drop byte sequences that are not valid UTF-8."""
    return text.encode("utf-8", "ignore").decode("utf-8")

def file_hash(path: str) -> str:
    """MD5 of the file contents, used only as a cache key (not for security)."""
    with open(path, "rb") as f:
        return hashlib.md5(f.read()).hexdigest()
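# For very large uploads, reading the whole file into memory just to hash it
# can be costly. A minimal streaming variant (a sketch, not wired into the
# app) hashes in fixed-size blocks instead:
def file_hash_streaming(path: str, block_size: int = 1 << 20) -> str:
    """Same MD5 cache key as file_hash, but with flat memory usage."""
    md5 = hashlib.md5()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(block_size), b""):
            md5.update(block)
    return md5.hexdigest()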
def extract_all_pages(file_path: str) -> str:
    """Extract the text of every page of a PDF, newline-joined."""
    try:
        text_chunks = []
        with pdfplumber.open(file_path) as pdf:
            for page in pdf.pages:
                # extract_text() returns None for image-only pages.
                page_text = page.extract_text() or ""
                text_chunks.append(page_text.strip())
        return "\n".join(text_chunks)
    except Exception as e:
        return f"PDF processing error: {str(e)}"
def convert_file_to_json(file_path: str, file_type: str) -> str:
    """Convert an uploaded file to a JSON payload, with a content-hash cache."""
    try:
        h = file_hash(file_path)
        cache_path = os.path.join(file_cache_dir, f"{h}.json")
        if os.path.exists(cache_path):
            with open(cache_path, "r", encoding="utf-8") as f:
                return f.read()
        if file_type == "pdf":
            text = extract_all_pages(file_path)
            result = json.dumps({"filename": os.path.basename(file_path), "content": text, "status": "initial"})
        else:
            result = json.dumps({"error": f"Unsupported file type: {file_type}"})
        with open(cache_path, "w", encoding="utf-8") as f:
            f.write(result)
        return result
    except Exception as e:
        return json.dumps({"error": f"Error processing {os.path.basename(file_path)}: {str(e)}"})
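# Supporting more record formats only needs another branch ahead of the
# "Unsupported file type" fallback; a plain-text branch might look like this
# (a hypothetical extension, not part of the app):
#     elif file_type == "txt":
#         with open(file_path, "r", encoding="utf-8", errors="ignore") as f:
#             result = json.dumps({"filename": os.path.basename(file_path),
#                                  "content": f.read(), "status": "initial"})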
def log_system_usage(tag=""):
    """Print CPU/RAM usage and, when nvidia-smi is available, GPU memory and utilization."""
    try:
        cpu = psutil.cpu_percent(interval=1)
        mem = psutil.virtual_memory()
        print(f"[{tag}] CPU: {cpu}% | RAM: {mem.used // (1024**2)}MB / {mem.total // (1024**2)}MB")
        result = subprocess.run(
            ["nvidia-smi", "--query-gpu=memory.used,memory.total,utilization.gpu", "--format=csv,nounits,noheader"],
            capture_output=True, text=True
        )
        if result.returncode == 0:
            used, total, util = result.stdout.strip().split(", ")
            print(f"[{tag}] GPU: {used}MB / {total}MB | Utilization: {util}%")
    except Exception as e:
        print(f"[{tag}] GPU/CPU monitor failed: {e}")
def clean_response(text: str) -> str:
    """Strip tool-call noise and filler from model output, keeping only text
    that contains at least one of the four report headings."""
    text = sanitize_utf8(text)
    text = re.sub(r"\[TOOL_CALLS\].*", "", text, flags=re.DOTALL)
    text = re.sub(r"\['get_[^\]]+\']\n?", "", text)
    text = re.sub(r"\{'meta':\s*\{.*?\}\s*,\s*'results':\s*\[.*?\]\}\n?", "", text, flags=re.DOTALL)
    text = re.sub(r"(?i)(to analyze|based on|will start|no (drug|clinical|information)|none).*?\n", "", text, flags=re.DOTALL)
    text = re.sub(r"\n{3,}", "\n\n", text).strip()
    if not re.search(r"(Missed Diagnoses|Medication Conflicts|Incomplete Assessments|Urgent Follow-up)", text, re.IGNORECASE):
        return ""
    return text
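# Example (hypothetical model fragment): an input such as
#     "[TOOL_CALLS] [...]\n**Missed Diagnoses**: elevated HbA1c never addressed"
# keeps only the headed finding, while a fragment containing none of the four
# headings collapses to "" and is skipped by the caller's `if cleaned:` checks.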
def init_agent():
    print("🚀 Initializing model...")
    log_system_usage("Before Load")
    agent = TxAgent(
        model_name="mims-harvard/TxAgent-T1-Llama-3.1-8B",
        rag_model_name="mims-harvard/ToolRAG-T1-GTE-Qwen2-1.5B",
        force_finish=True,
        enable_checker=True,
        step_rag_num=1,
        seed=100,
    )
    agent.init_model()
    log_system_usage("After Load")
    print("✅ Agent Ready")
    return agent
def create_ui(agent):
    with gr.Blocks(theme=gr.themes.Soft()) as demo:
        gr.Markdown("<h1 style='text-align: center;'>🩺 Clinical Oversight Assistant</h1>")
        chatbot = gr.Chatbot(label="Analysis", height=600, type="messages")
        file_upload = gr.File(file_types=[".pdf"], file_count="multiple")
        msg_input = gr.Textbox(placeholder="Ask about potential oversights...", show_label=False)
        send_btn = gr.Button("Analyze", variant="primary")
        download_output = gr.File(label="Download Report")
        # Shared history state, used by both the button click and Enter submit.
        chat_state = gr.State([])
        def analyze(message: str, history: List[dict], files: List):
            history.append({"role": "user", "content": message})
            yield history, None
            extracted = ""
            file_hash_value = ""
            if files:
                with ThreadPoolExecutor(max_workers=6) as executor:
                    futures = [executor.submit(convert_file_to_json, f.name, f.name.split(".")[-1].lower()) for f in files]
                    # as_completed yields in completion order, so multi-file
                    # text may not follow the upload order.
                    results = [sanitize_utf8(f.result()) for f in as_completed(futures)]
                    extracted = "\n".join(results)
                file_hash_value = file_hash(files[0].name)
            # Split into small chunks of 2,000 characters
            chunk_size = 2000
            chunks = [extracted[i:i + chunk_size] for i in range(0, len(extracted), chunk_size)]
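            # A fixed window can split a finding across a chunk boundary; an
            # overlapping window is a common mitigation (a sketch, not used here):
            #     overlap = 200
            #     chunks = [extracted[i:i + chunk_size]
            #               for i in range(0, len(extracted), chunk_size - overlap)]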
            combined_response = ""
            prompt_template = """
List doctor oversights in the medical records under these headings with brief details:
**Missed Diagnoses**: Unaddressed conditions or inconsistencies.
**Medication Conflicts**: Risky prescriptions.
**Incomplete Assessments**: Missing evaluations.
**Urgent Follow-up**: Issues needing attention.
Records:
{chunk}
"""
            try:
                history.append({"role": "assistant", "content": "🔍 Analyzing..."})
                yield history, None
                for chunk in chunks:
                    prompt = prompt_template.format(chunk=chunk)
                    chunk_response = ""
                    for output in agent.run_gradio_chat(
                        message=prompt,
                        history=[],
                        temperature=0.1,
                        max_new_tokens=256,
                        max_token=4096,
                        call_agent=False,
                        conversation=[],
                    ):
                        if output is None:
                            continue
                        if isinstance(output, list):
                            for m in output:
                                if hasattr(m, 'content') and m.content:
                                    cleaned = clean_response(m.content)
                                    if cleaned:
                                        chunk_response += cleaned + "\n"
                                        history[-1]["content"] = combined_response + chunk_response.strip()
                                        yield history, None
                        elif isinstance(output, str) and output.strip():
                            cleaned = clean_response(output)
                            if cleaned:
                                chunk_response += cleaned + "\n"
                                history[-1]["content"] = combined_response + chunk_response.strip()
                                yield history, None
                    if chunk_response:
                        combined_response += chunk_response
                if not combined_response:
                    history[-1]["content"] = "No oversights identified."
                else:
                    history[-1]["content"] = combined_response.strip()
                report_path = os.path.join(report_dir, f"{file_hash_value}_report.txt") if file_hash_value else None
                if report_path and combined_response:
                    with open(report_path, "w", encoding="utf-8") as f:
                        f.write(combined_response)
                yield history, report_path if report_path and os.path.exists(report_path) else None
            except Exception as e:
                print("🚨 ERROR:", e)
                history[-1]["content"] = f"❌ Error: {str(e)}"
                yield history, None
        send_btn.click(analyze, inputs=[msg_input, chat_state, file_upload], outputs=[chatbot, download_output])
        msg_input.submit(analyze, inputs=[msg_input, chat_state, file_upload], outputs=[chatbot, download_output])
    return demo
if __name__ == "__main__":
    print("🚀 Launching app...")
    agent = init_agent()
    demo = create_ui(agent)
    demo.queue(api_open=False).launch(
        server_name="0.0.0.0",
        server_port=7860,
        show_error=True,
        allowed_paths=[report_dir],  # let the download File component serve reports
        share=False,
    )