"""Gradio front-end for TxAgent therapeutic-reasoning chat.

Loads the TxAgent model, wraps its streaming chat generator in a Gradio
Blocks UI, and renders assistant/tool messages inside collapsible boxes.
"""

import importlib
import inspect
import json
import logging
import os
import re
import sys
from multiprocessing import freeze_support

import gradio as gr

# === Fix path to include src/txagent
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "src"))

# === Import and reload to ensure the current on-disk module is used,
#     not a stale copy cached from an earlier interpreter session.
import txagent.txagent
importlib.reload(txagent.txagent)
from txagent.txagent import TxAgent

# === Debug info
print(">>> TxAgent loaded from:", inspect.getfile(TxAgent))
print(">>> TxAgent has run_gradio_chat:", hasattr(TxAgent, "run_gradio_chat"))

# === Logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# === Environment
current_dir = os.path.abspath(os.path.dirname(__file__))
os.environ["MKL_THREADING_LAYER"] = "GNU"
os.environ["TOKENIZERS_PARALLELISM"] = "false"

# === Model config
model_name = "mims-harvard/TxAgent-T1-Llama-3.1-8B"
rag_model_name = "mims-harvard/ToolRAG-T1-GTE-Qwen2-1.5B"
new_tool_files = {
    "new_tool": os.path.join(current_dir, "data", "new_tool.json")
}

# === Example prompts
question_examples = [
    ["Given a patient with WHIM syndrome on prophylactic antibiotics, is it advisable to co-administer Xolremdi with fluconazole?"],
    ["What treatment options exist for HER2+ breast cancer resistant to trastuzumab?"]
]


# === Extract tool name and format output
def extract_tool_name_and_clean_content(msg):
    """Return a ``("Tool: <name>", content)`` pair for one chat message.

    Args:
        msg: A chat message, either a dict or an object with ``content`` /
            ``tool_calls`` attributes.

    Returns:
        Tuple of a display title (``"Tool: <name>"``, defaulting to
        ``"Tool: Tool Result"`` when no tool name can be recovered) and the
        message content (dict/list content is pretty-printed as JSON).
    """
    tool_name = "Tool Result"
    content = msg.get("content") if isinstance(msg, dict) else getattr(msg, "content", "")
    tool_calls = msg.get("tool_calls") if isinstance(msg, dict) else getattr(msg, "tool_calls", None)

    # Primary source: the structured tool_calls field (JSON string or list
    # of dicts); take the name of the first call.
    if tool_calls:
        try:
            if isinstance(tool_calls, str):
                tool_calls = json.loads(tool_calls)
            if isinstance(tool_calls, list) and tool_calls:
                tool_name = tool_calls[0].get("name", "Tool Result")
        except Exception as e:
            # Best-effort extraction: a bad payload only costs us the title.
            logger.warning("[extract_tool_name] Failed tool_calls parsing: %s", e)

    # Fallback: a "[TOOL_CALLS][...]" JSON blob embedded in the raw content.
    if "TOOL_CALLS" in str(content):
        try:
            match = re.search(r"\[TOOL_CALLS\](\[.*?\])", str(content))
            if match:
                embedded = json.loads(match.group(1))
                if isinstance(embedded, list) and embedded:
                    tool_name = embedded[0].get("name", "Tool Result")
        except Exception as e:
            logger.warning("[extract_tool_name] Failed TOOL_CALLS content parse: %s", e)

    # Pretty-print structured content so it is readable inside the chat box.
    if isinstance(content, (dict, list)):
        content = json.dumps(content, indent=2)

    return f"Tool: {tool_name}", content


# === Format answer in collapsible box
def format_collapsible(content, title="Answer"):
    """Wrap *content* in a collapsible HTML box headed by *title*.

    NOTE(review): the original HTML markup was garbled in this file; it is
    reconstructed here as an open <details>/<summary> element — confirm the
    exact tags/styling against the deployed version.
    """
    return (
        f"<details open>"
        f"<summary><strong>{title}</strong></summary>"
        f"<div style='padding: 6px 10px;'>{content}</div>"
        f"</details>"
    )


# === Build UI
def create_ui(agent):
    """Build and return the Gradio Blocks app around *agent*.

    Args:
        agent: An initialized TxAgent exposing ``run_gradio_chat``.

    Returns:
        The assembled ``gr.Blocks`` demo (not yet launched).
    """
    with gr.Blocks(theme=gr.themes.Soft()) as demo:
        # NOTE(review): heading markup reconstructed — original HTML was
        # stripped from this file; confirm against the deployed version.
        gr.Markdown(
            "<h1 style='text-align: center;'>"
            "\U0001F48A TxAgent: Therapeutic Reasoning"
            "</h1>"
        )
        gr.Markdown("Ask biomedical or therapeutic questions. Powered by tool-augmented reasoning.")

        chatbot = gr.Chatbot(label="TxAgent", height=600, type="messages")
        message_input = gr.Textbox(placeholder="Ask a biomedical question...", show_label=False)
        send_button = gr.Button("Send", variant="primary")
        conversation_state = gr.State([])

        def handle_chat(message, history, conversation):
            """Stream agent replies, re-formatting each update for the chatbot.

            Yields full message lists in Gradio "messages" format; assistant
            messages are wrapped in a collapsible box titled with the tool
            name recovered from the message.
            """
            generator = agent.run_gradio_chat(
                message=message,
                history=history,
                temperature=0.3,
                max_new_tokens=1024,
                max_token=8192,
                call_agent=False,
                conversation=conversation,
                max_round=30
            )
            for update in generator:
                formatted = []
                for m in update:
                    # Messages may be dicts or attribute-style objects.
                    role = m.get("role") if isinstance(m, dict) else getattr(m, "role", "assistant")
                    if role == "assistant":
                        title, clean = extract_tool_name_and_clean_content(m)
                        content = format_collapsible(clean, title)
                    else:
                        content = m.get("content") if isinstance(m, dict) else getattr(m, "content", "")
                    formatted.append({"role": role, "content": content})
                yield formatted

        # Both the button and pressing Enter submit the same handler.
        inputs = [message_input, chatbot, conversation_state]
        send_button.click(fn=handle_chat, inputs=inputs, outputs=chatbot)
        message_input.submit(fn=handle_chat, inputs=inputs, outputs=chatbot)

        gr.Examples(examples=question_examples, inputs=message_input)
        gr.Markdown("DISCLAIMER: This demo is for research purposes only and does not provide medical advice.")

    return demo


# === Main
if __name__ == "__main__":
    freeze_support()
    try:
        agent = TxAgent(
            model_name=model_name,
            rag_model_name=rag_model_name,
            tool_files_dict=new_tool_files,
            force_finish=True,
            enable_checker=True,
            step_rag_num=10,
            seed=100,
            additional_default_tools=[]
        )
        agent.init_model()

        # Fail fast with a clear message if an incompatible TxAgent is loaded.
        if not hasattr(agent, "run_gradio_chat"):
            raise AttributeError("\u274C TxAgent is missing `run_gradio_chat`.")

        demo = create_ui(agent)
        demo.queue().launch(
            server_name="0.0.0.0",
            server_port=7860,
            show_error=True
        )
    except Exception as e:
        print(f"\u274C App failed to start: {e}")
        raise