Ali2206 committed on
Commit
e6865f5
·
verified ·
1 Parent(s): bd7074e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +23 -43
app.py CHANGED
@@ -1,5 +1,4 @@
1
  import os
2
- import sys
3
  import torch
4
  import json
5
  import logging
@@ -8,14 +7,14 @@ from importlib.resources import files
8
  from txagent import TxAgent
9
  from tooluniverse import ToolUniverse
10
 
11
- # Logging setup
12
  logging.basicConfig(
13
  level=logging.INFO,
14
  format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
15
  )
16
  logger = logging.getLogger(__name__)
17
 
18
- # Paths and environment
19
  current_dir = os.path.dirname(os.path.abspath(__file__))
20
  os.environ["MKL_THREADING_LAYER"] = "GNU"
21
  os.environ["TOKENIZERS_PARALLELISM"] = "false"
@@ -69,19 +68,14 @@ def patch_embedding_loading():
69
  logger.error("No method found to access tools from ToolUniverse")
70
  return False
71
 
72
- current_count = len(tools)
73
- embedding_count = len(self.tool_desc_embedding)
74
-
75
- if current_count != embedding_count:
76
- logger.warning(f"Tool count mismatch (tools: {current_count}, embeddings: {embedding_count})")
77
- if current_count < embedding_count:
78
- self.tool_desc_embedding = self.tool_desc_embedding[:current_count]
79
- logger.info(f"Truncated embeddings to match {current_count} tools")
80
  else:
81
- last_embedding = self.tool_desc_embedding[-1]
82
- padding = [last_embedding] * (current_count - embedding_count)
83
  self.tool_desc_embedding = torch.cat([self.tool_desc_embedding] + padding)
84
- logger.info(f"Padded embeddings to match {current_count} tools")
85
 
86
  return True
87
 
@@ -90,26 +84,17 @@ def patch_embedding_loading():
90
  return False
91
 
92
  ToolRAGModel.load_tool_desc_embedding = patched_load
93
- logger.info("Successfully patched embedding loading")
94
 
95
  except Exception as e:
96
- logger.error(f"Failed to patch embedding loading: {str(e)}")
97
- raise
98
 
99
  def prepare_tool_files():
100
  os.makedirs(os.path.join(current_dir, 'data'), exist_ok=True)
101
  if not os.path.exists(CONFIG["tool_files"]["new_tool"]):
102
- logger.info("Generating tool list using ToolUniverse...")
103
  try:
104
  tu = ToolUniverse()
105
- if hasattr(tu, 'get_all_tools'):
106
- tools = tu.get_all_tools()
107
- elif hasattr(tu, 'tools'):
108
- tools = tu.tools
109
- else:
110
- tools = []
111
- logger.error("Could not access tools from ToolUniverse")
112
-
113
  with open(CONFIG["tool_files"]["new_tool"], "w") as f:
114
  json.dump(tools, f, indent=2)
115
  logger.info(f"Saved {len(tools)} tools to {CONFIG['tool_files']['new_tool']}")
@@ -119,7 +104,6 @@ def prepare_tool_files():
119
  def create_agent():
120
  patch_embedding_loading()
121
  prepare_tool_files()
122
-
123
  try:
124
  agent = TxAgent(
125
  CONFIG["model_name"],
@@ -134,27 +118,23 @@ def create_agent():
134
  agent.init_model()
135
  return agent
136
  except Exception as e:
137
- logger.error(f"Failed to create agent: {str(e)}")
138
  raise
139
 
140
- def respond(message_input, chat_history, temperature, max_new_tokens, max_tokens, multi_agent, conversation, max_round):
141
- # Ensure the message is a proper string
142
- message = message_input.strip() if isinstance(message_input, str) else ""
143
-
144
- if len(message) <= 10:
145
- return chat_history + [["assistant", "Hi, I am TxAgent, an assistant for answering biomedical questions. Please provide a valid message with a string longer than 10 characters."]]
146
 
147
- # Add user message to history
148
- chat_history = chat_history + [["user", message]]
149
 
150
  print("\n==== DEBUG ====")
151
- print("User Message:", message)
152
  print("Chat History:", chat_history)
153
  print("================\n")
154
 
155
  try:
156
- # Format for model: list of (role, message) tuples
157
- formatted_history = [(role, content) for role, content in chat_history]
158
 
159
  response_generator = agent.run_gradio_chat(
160
  formatted_history,
@@ -173,15 +153,15 @@ def respond(message_input, chat_history, temperature, max_new_tokens, max_tokens
173
  else:
174
  collected += str(chunk)
175
 
176
- chat_history.append(["assistant", collected])
177
  except Exception as e:
178
- chat_history.append(["assistant", f"Error: {str(e)}"])
179
 
180
  return chat_history
181
 
182
  def create_demo(agent):
183
  with gr.Blocks(css=chat_css) as demo:
184
- chatbot = gr.Chatbot(label="TxAgent", render_markdown=True)
185
  msg = gr.Textbox(label="Your question", placeholder="Type your biomedical query...", scale=6)
186
  with gr.Row():
187
  temp = gr.Slider(0, 1, value=0.3, label="Temperature")
@@ -205,7 +185,7 @@ def main():
205
  global agent
206
  agent = create_agent()
207
  demo = create_demo(agent)
208
- demo.launch()
209
  except Exception as e:
210
  logger.error(f"Application failed to start: {str(e)}")
211
  raise
 
1
  import os
 
2
  import torch
3
  import json
4
  import logging
 
7
  from txagent import TxAgent
8
  from tooluniverse import ToolUniverse
9
 
10
+ # Setup logging
11
  logging.basicConfig(
12
  level=logging.INFO,
13
  format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
14
  )
15
  logger = logging.getLogger(__name__)
16
 
17
+ # Env vars
18
  current_dir = os.path.dirname(os.path.abspath(__file__))
19
  os.environ["MKL_THREADING_LAYER"] = "GNU"
20
  os.environ["TOKENIZERS_PARALLELISM"] = "false"
 
68
  logger.error("No method found to access tools from ToolUniverse")
69
  return False
70
 
71
+ if len(tools) != len(self.tool_desc_embedding):
72
+ logger.warning("Tool count and embedding count mismatch.")
73
+ if len(tools) < len(self.tool_desc_embedding):
74
+ self.tool_desc_embedding = self.tool_desc_embedding[:len(tools)]
 
 
 
 
75
  else:
76
+ last_emb = self.tool_desc_embedding[-1]
77
+ padding = [last_emb] * (len(tools) - len(self.tool_desc_embedding))
78
  self.tool_desc_embedding = torch.cat([self.tool_desc_embedding] + padding)
 
79
 
80
  return True
81
 
 
84
  return False
85
 
86
  ToolRAGModel.load_tool_desc_embedding = patched_load
87
+ logger.info("Successfully patched ToolRAGModel")
88
 
89
  except Exception as e:
90
+ logger.error(f"Failed to patch embedding loader: {str(e)}")
 
91
 
92
  def prepare_tool_files():
93
  os.makedirs(os.path.join(current_dir, 'data'), exist_ok=True)
94
  if not os.path.exists(CONFIG["tool_files"]["new_tool"]):
 
95
  try:
96
  tu = ToolUniverse()
97
+ tools = tu.get_all_tools() if hasattr(tu, 'get_all_tools') else getattr(tu, 'tools', [])
 
 
 
 
 
 
 
98
  with open(CONFIG["tool_files"]["new_tool"], "w") as f:
99
  json.dump(tools, f, indent=2)
100
  logger.info(f"Saved {len(tools)} tools to {CONFIG['tool_files']['new_tool']}")
 
104
  def create_agent():
105
  patch_embedding_loading()
106
  prepare_tool_files()
 
107
  try:
108
  agent = TxAgent(
109
  CONFIG["model_name"],
 
118
  agent.init_model()
119
  return agent
120
  except Exception as e:
121
+ logger.error(f"Failed to create TxAgent: {str(e)}")
122
  raise
123
 
124
+ # GRADIO 5.x-compatible message format
125
+ def respond(msg, chat_history, temperature, max_new_tokens, max_tokens, multi_agent, conversation, max_round):
126
+ if not isinstance(msg, str) or len(msg.strip()) <= 10:
127
+ return chat_history + [{"role": "assistant", "content": "Hi, I am TxAgent. Please provide a valid question with more than 10 characters."}]
 
 
128
 
129
+ chat_history = chat_history + [{"role": "user", "content": msg.strip()}]
 
130
 
131
  print("\n==== DEBUG ====")
132
+ print("User Message:", msg)
133
  print("Chat History:", chat_history)
134
  print("================\n")
135
 
136
  try:
137
+ formatted_history = [(m["role"], m["content"]) for m in chat_history]
 
138
 
139
  response_generator = agent.run_gradio_chat(
140
  formatted_history,
 
153
  else:
154
  collected += str(chunk)
155
 
156
+ chat_history.append({"role": "assistant", "content": collected})
157
  except Exception as e:
158
+ chat_history.append({"role": "assistant", "content": f"Error: {str(e)}"})
159
 
160
  return chat_history
161
 
162
  def create_demo(agent):
163
  with gr.Blocks(css=chat_css) as demo:
164
+ chatbot = gr.Chatbot(label="TxAgent", type="messages", render_markdown=True)
165
  msg = gr.Textbox(label="Your question", placeholder="Type your biomedical query...", scale=6)
166
  with gr.Row():
167
  temp = gr.Slider(0, 1, value=0.3, label="Temperature")
 
185
  global agent
186
  agent = create_agent()
187
  demo = create_demo(agent)
188
+ demo.launch(share=False) # Set to True to get a public link
189
  except Exception as e:
190
  logger.error(f"Application failed to start: {str(e)}")
191
  raise