Adding error handling to resolve error messages
Gradio_UI.py  CHANGED  (+5 −2)
@@ -142,8 +142,11 @@ def stream_to_gradio(
     for step_log in agent.run(task, stream=True, reset=reset_agent_memory, additional_args=additional_args):
         # Track tokens if model provides them
         if hasattr(agent.model, "last_input_token_count"):
-            total_input_tokens += agent.model.last_input_token_count
-            total_output_tokens += agent.model.last_output_token_count
+            try:
+                total_input_tokens += agent.model.last_input_token_count
+                total_output_tokens += agent.model.last_output_token_count
+            except:
+                pass
             if isinstance(step_log, ActionStep):
                 step_log.input_token_count = agent.model.last_input_token_count
                 step_log.output_token_count = agent.model.last_output_token_count
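For context, below is a minimal sketch of how the patched token-tracking loop reads after this commit, pulled out of stream_to_gradio into a standalone generator so it is self-contained. The function name, the counter initialization, the yield, and the import line are assumptions for illustration, not part of the commit; it also narrows the commit's bare `except: pass` to TypeError (the case where a backend leaves the token counts as None), which is one way to keep the guard while not swallowing unrelated errors.

# Sketch only: names other than agent.run(...) and ActionStep are illustrative.
from smolagents import ActionStep  # assumed import path; the real Gradio_UI.py has its own imports

def stream_with_token_tracking(agent, task, reset_agent_memory=False, additional_args=None):
    # In the real stream_to_gradio these counters are initialized earlier in the function.
    total_input_tokens = 0
    total_output_tokens = 0
    for step_log in agent.run(task, stream=True, reset=reset_agent_memory, additional_args=additional_args):
        # Track tokens if the model provides them; some backends leave the
        # counts unset or None, which is what the new try/except guards against.
        if hasattr(agent.model, "last_input_token_count"):
            try:
                total_input_tokens += agent.model.last_input_token_count
                total_output_tokens += agent.model.last_output_token_count
            except TypeError:  # the commit itself uses a bare `except: pass`
                pass
            if isinstance(step_log, ActionStep):
                step_log.input_token_count = agent.model.last_input_token_count
                step_log.output_token_count = agent.model.last_output_token_count
        yield step_log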