# test / app.py
# (Hugging Face Space file; author Ali2206, commit a87f861 "Update app.py", 5.94 kB)
import os
import json
import logging
import torch
from txagent import TxAgent
import gradio as gr
from tooluniverse import ToolUniverse
# Configuration - Using remote Hugging Face models
CONFIG = {
    # Primary therapeutic-reasoning LLM (remote Hugging Face Hub repo id).
    "model_name": "mims-harvard/TxAgent-T1-Llama-3.1-8B",
    # Embedding model used by the tool-RAG retriever.
    "rag_model_name": "mims-harvard/ToolRAG-T1-GTE-Qwen2-1.5B",
    # Cache file for pre-computed tool-description embeddings
    # (loaded/saved by load_embeddings below).
    "embedding_filename": "ToolRAG-T1-GTE-Qwen2-1.5Btool_embedding_47dc56b3e3ddeb31af4f19defdd538d984de1500368852a0fab80bc2e826c944.pt",
    # Tool-definition JSON files; generated by prepare_tool_files() when absent.
    "tool_files": {
        "new_tool": "./data/new_tool.json"
    }
}
# Logging setup
# Single timestamped format applied process-wide; module logger follows the
# standard getLogger(__name__) convention.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)
def prepare_tool_files():
    """Ensure ./data exists and generate the tool-list JSON if it is missing.

    Enumerates every tool known to ToolUniverse and writes the list to
    CONFIG["tool_files"]["new_tool"]. No-op when that file already exists.
    """
    os.makedirs("./data", exist_ok=True)
    tool_path = CONFIG["tool_files"]["new_tool"]
    if os.path.exists(tool_path):
        return
    logger.info("Generating tool list using ToolUniverse...")
    tu = ToolUniverse()
    tools = tu.get_all_tools()
    # Explicit UTF-8: the default `open(..., "w")` encoding is locale-dependent
    # and could fail on non-ASCII tool descriptions (e.g. cp1252 on Windows).
    with open(tool_path, "w", encoding="utf-8") as f:
        json.dump(tools, f, indent=2)
    # Lazy %-style args: formatting is skipped if INFO logging is disabled.
    logger.info("Saved %d tools to %s", len(tools), tool_path)
def load_embeddings(agent):
    """Attach tool-description embeddings to ``agent.rag_model``.

    Loads the pre-generated embeddings file from CONFIG["embedding_filename"]
    when present. If the file is missing or fails to load (corrupt or
    incompatible), regenerates embeddings from all tool descriptions, saves
    them for next time, and assigns them.

    Raises:
        Exception: re-raised if embedding *generation* fails (a failed load
        alone is recoverable and only logged).
    """
    embedding_path = CONFIG["embedding_filename"]
    if os.path.exists(embedding_path):
        logger.info("βœ… Loading pre-generated embeddings file")
        try:
            embeddings = torch.load(embedding_path)
            agent.rag_model.tool_desc_embedding = embeddings
            return
        except Exception as e:
            # Deliberate fall-through: a bad cache file should trigger
            # regeneration below, not abort startup.
            logger.error("Failed to load embeddings: %s", e)
    logger.info("Generating tool embeddings...")
    try:
        tools = agent.tooluniverse.get_all_tools()
        descriptions = [tool["description"] for tool in tools]
        embeddings = agent.rag_model.generate_embeddings(descriptions)
        torch.save(embeddings, embedding_path)
        agent.rag_model.tool_desc_embedding = embeddings
        logger.info("Embeddings saved to %s", embedding_path)
    except Exception as e:
        logger.error("Failed to generate embeddings: %s", e)
        raise
class TxAgentApp:
    """Holds a lazily-initialized TxAgent and exposes the UI entry points."""

    def __init__(self):
        # Model loading is deferred to initialize() so the UI starts instantly.
        self.agent = None
        self.is_initialized = False

    def initialize(self):
        """Load TxAgent and its RAG model from the Hugging Face Hub.

        Idempotent; returns a human-readable status string for the UI.
        """
        if self.is_initialized:
            return "βœ… Already initialized"
        try:
            logger.info("Initializing TxAgent with remote models...")
            # Initialize without local_files_only parameter
            self.agent = TxAgent(
                CONFIG["model_name"],
                CONFIG["rag_model_name"],
                tool_files_dict=CONFIG["tool_files"],
                force_finish=True,
                enable_checker=True,
                step_rag_num=10,
                seed=100,
                additional_default_tools=["DirectResponse", "RequireClarification"]
            )
            logger.info("Loading models from Hugging Face Hub...")
            self.agent.init_model()
            logger.info("Preparing embeddings...")
            load_embeddings(self.agent)
            self.is_initialized = True
            return "βœ… TxAgent initialized successfully (using remote models)"
        except Exception as e:
            logger.error(f"Initialization failed: {str(e)}")
            return f"❌ Initialization failed: {str(e)}"

    def chat(self, message, history):
        """Stream updated chat history as (message, partial_response) pairs.

        This method contains ``yield`` and is therefore a generator: every
        code path must *yield* its output, never ``return`` a value.
        """
        if not self.is_initialized:
            # BUG FIX: this was ``return history + [...]``. Inside a generator
            # a ``return value`` only sets StopIteration.value, so Gradio
            # received an empty stream and the warning was never shown.
            yield history + [(message, "⚠️ Please initialize the model first")]
            return
        try:
            response = ""
            for chunk in self.agent.run_gradio_chat(
                message=message,
                history=history,
                temperature=0.3,
                max_new_tokens=1024,
                max_tokens=8192,
                multi_agent=False,
                conversation=[],
                max_round=30
            ):
                # assumes each chunk is a str fragment — TODO confirm TxAgent API
                response += chunk
                yield history + [(message, response)]
        except Exception as e:
            logger.error(f"Chat error: {str(e)}")
            yield history + [(message, f"Error: {str(e)}")]
def create_interface():
    """Build the Gradio Blocks UI wired to a fresh TxAgentApp.

    Returns the un-launched gr.Blocks object; the caller is responsible
    for calling .launch().
    """
    tx_app = TxAgentApp()
    with gr.Blocks(
        title="TxAgent",
        css="""
        .gradio-container {max-width: 900px !important}
        """
    ) as ui:
        # Page header.
        gr.Markdown("""
        # 🧠 TxAgent: Therapeutic Reasoning AI
        ### (Running with remote Hugging Face models)
        """)
        # Model-initialization controls on one row.
        with gr.Row():
            initialize_button = gr.Button("Initialize Model", variant="primary")
            status_box = gr.Textbox(label="Status", interactive=False)
        # Conversation area and input.
        conversation = gr.Chatbot(height=500, label="Conversation")
        question_box = gr.Textbox(label="Your clinical question")
        reset_button = gr.Button("Clear Chat")
        # Click-to-fill example prompts.
        gr.Examples(
            examples=[
                "How to adjust Journavx for renal impairment?",
                "Xolremdi and Prozac interaction in WHIM syndrome?",
                "Alternative to Warfarin for patient with amiodarone?"
            ],
            inputs=question_box
        )
        # Event wiring.
        initialize_button.click(fn=tx_app.initialize, outputs=status_box)
        question_box.submit(
            fn=tx_app.chat,
            inputs=[question_box, conversation],
            outputs=conversation
        )
        reset_button.click(fn=lambda: ([], ""), outputs=[conversation, question_box])
    return ui
if __name__ == "__main__":
    try:
        logger.info("Starting application...")
        # Prepare local tool files
        prepare_tool_files()
        # Launch interface
        interface = create_interface()
        # Bind on all interfaces; 7860 is the port Hugging Face Spaces expects.
        interface.launch(
            server_name="0.0.0.0",
            server_port=7860,
            share=False
        )
    except Exception as e:
        # Log fatal startup errors before re-raising so they reach the console.
        logger.error(f"Fatal error: {str(e)}")
        raise