Commit 422ac3a · 1 parent: 18eb5f6

Changed thinking streaming

src/gradio_utils.py (+27, -1) CHANGED
@@ -3,7 +3,7 @@ import gradio as gr
 import numpy as np
 import pandas as pd
 from gradio_folium import Folium
-from smolagents.gradio_ui import pull_messages_from_step
+#from smolagents.gradio_ui import pull_messages_from_step
 from smolagents.types import handle_agent_output_types, AgentText
 from folium import Map, TileLayer, Marker, Icon, Popup
 from folium.plugins import Fullscreen

@@ -89,12 +89,38 @@ def update_map_on_selection(row: pd.Series, df_routes: gr.State) -> Map:
 
     return f_map
 
+def pull_messages_from_step(step_log: AgentStepLog, test_mode: bool = True):
+    """Extract ChatMessage objects from agent steps"""
+    if isinstance(step_log, ActionStep):
+        yield gr.ChatMessage(role="assistant", content=step_log.llm_output or "", metadata={"title": "🤔💭💭"},)
+        if step_log.tool_calls is not None:
+            first_tool_call = step_log.tool_calls[0]
+            used_code = first_tool_call.name == "code interpreter"
+            content = first_tool_call.arguments
+            if used_code:
+                content = f"```py\n{content}\n```"
+            yield gr.ChatMessage(
+                role="assistant",
+                metadata={"title": "🤔💭💭"},
+                content=str(content),
+            )
+        if step_log.observations is not None:
+            yield gr.ChatMessage(role="assistant", content=step_log.observations, metadata={"title": "🤔💭💭"},)
+        if step_log.error is not None:
+            yield gr.ChatMessage(
+                role="assistant",
+                content=str(step_log.error),
+                metadata={"title": "💥 Error"},
+            )
+
 
 # Simplified interaction function
 def interact_with_agent(agent, prompt, messages, df_routes, additional_args):
 
     messages.append(gr.ChatMessage(role="user", content=prompt))
     yield (messages, df_routes, gr.Textbox(value=FINAL_MESSAGE_HEADER, container=True))
+
+    #messages.append(gr.ChatMessage(role="assistant", content="", "title": "🤔💭💭"))
 
     for msg, _df_routes, final_message in stream_to_gradio(
         agent,
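
The new pull_messages_from_step generator is consumed through the stream_to_gradio call visible at the bottom of the second hunk, but stream_to_gradio's body is not part of this commit. The following is only a rough sketch, assuming it runs the agent with smolagents' streaming mode (agent.run(task, stream=True)) and re-yields each ChatMessage together with the routes dataframe and the running final message; the tuple shape is inferred from the call site above, and the parameter names task and additional_args are assumptions.

    # Sketch under stated assumptions; the real stream_to_gradio in
    # src/gradio_utils.py is not shown in this diff.
    def stream_to_gradio(agent, task, df_routes, additional_args=None):
        final_message = FINAL_MESSAGE_HEADER
        # smolagents agents can yield their step logs incrementally with stream=True
        for step_log in agent.run(task, stream=True, additional_args=additional_args or {}):
            # Translate each step log into zero or more gr.ChatMessage objects
            for message in pull_messages_from_step(step_log):
                yield message, df_routes, final_message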