mtyrrell committed
Commit acb99db · 1 Parent(s): 9e430dc

added graph viz for fun

Files changed (2):
  1. app/main.py +62 -29
  2. requirements.txt +1 -0
app/main.py CHANGED
@@ -2,6 +2,8 @@ import gradio as gr
 from gradio_client import Client
 from langgraph.graph import StateGraph, START, END
 from typing import TypedDict, Optional
+import io
+from PIL import Image
 
 #OPEN QUESTION: SHOULD WE PASS ALL PARAMS FROM THE ORCHESTRATOR TO THE NODES INSTEAD OF SETTING IN EACH MODULE?
 
@@ -99,40 +101,71 @@ ui = gr.Interface(
     flagging_mode="never"
 )
 
+# Add a function to generate the graph visualization
+def get_graph_visualization():
+    """Generate and return the LangGraph workflow visualization as a PIL Image."""
+    # Generate the graph as PNG bytes
+    graph_png_bytes = graph.get_graph().draw_mermaid_png()
+
+    # Convert bytes to PIL Image for Gradio display
+    graph_image = Image.open(io.BytesIO(graph_png_bytes))
+    return graph_image
+
+
 # Guidance for ChatUI - can be removed later. Questionable whether front end even necessary. Maybe nice to show the graph.
 with gr.Blocks(title="ChatFed Orchestrator") as demo:
     gr.Markdown("# ChatFed Orchestrator")
-    gr.Markdown("This LangGraph server exposes MCP endpoints for the ChatUI module to call.")
-    gr.Markdown("**Available MCP Tools:**")
-    gr.Markdown("- `process_query`: accepts query with optional filters")
+    gr.Markdown("This LangGraph server exposes MCP endpoints for the ChatUI module to call (which triggers the graph).")
 
-    with gr.Accordion("MCP Endpoint Information", open=True):
-        gr.Markdown(f"""
-        **MCP Server Endpoint:** https://giz-chatfed-orchestrator.hf.space/gradio_api/mcp/sse
-
-        **For ChatUI Integration:**
-        ```python
-        from gradio_client import Client
-
-        # Connect to orchestrator
-        orchestrator_client = Client("https://giz-chatfed-orchestrator.hf.space")
-
-        # Basic usage (no filters)
-        response = orchestrator_client.predict(
-            query="query",
-            api_name="/process_query"
-        )
+    with gr.Row():
+        # Left column - Graph visualization
+        with gr.Column(scale=1):
+            gr.Markdown("**Workflow Visualization**")
+            graph_display = gr.Image(
+                value=get_graph_visualization(),
+                label="LangGraph Workflow",
+                interactive=False,
+                height=300
+            )
+
+            # Add a refresh button for the graph
+            refresh_graph_btn = gr.Button("🔄 Refresh Graph", size="sm")
+            refresh_graph_btn.click(
+                fn=get_graph_visualization,
+                outputs=graph_display
+            )
 
-        # Advanced usage with any combination of filters
-        response = orchestrator_client.predict(
-            query="query",
-            reports_filter="annual_reports",
-            sources_filter="internal",
-            year_filter="2024",
-            api_name="/process_query"
-        )
-        ```
-        """)
+        # Right column - Interface and documentation
+        with gr.Column(scale=2):
+            gr.Markdown("**Available MCP Tools:**")
+
+            with gr.Accordion("MCP Endpoint Information", open=True):
+                gr.Markdown(f"""
+                **MCP Server Endpoint:** https://giz-chatfed-orchestrator.hf.space/gradio_api/mcp/sse
+
+                **For ChatUI Integration:**
+                ```python
+                from gradio_client import Client
+
+                # Connect to orchestrator
+                orchestrator_client = Client("https://giz-chatfed-orchestrator.hf.space")
+
+                # Basic usage (no filters)
+                response = orchestrator_client.predict(
+                    query="query",
+                    api_name="/process_query"
+                )
+
+                # Advanced usage with any combination of filters
+                response = orchestrator_client.predict(
+                    query="query",
+                    reports_filter="annual_reports",
+                    sources_filter="internal",
+                    year_filter="2024",
+                    api_name="/process_query"
+                )
+                ```
+                """)
 
     with gr.Accordion("Quick Testing Interface", open=True):
         ui.render()
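For context on the new helper: `get_graph().draw_mermaid_png()` on a compiled LangGraph returns raw PNG bytes, which is why the commit wraps them in `io.BytesIO` and `PIL.Image` before handing them to `gr.Image`. The sketch below reproduces that conversion against a stand-in graph; the state fields and node names are illustrative placeholders, not the orchestrator's actual workflow defined elsewhere in app/main.py.

```python
# Illustrative sketch only: GraphState, "retrieve" and "generate" are placeholders,
# not the real ChatFed orchestrator graph.
import io
from typing import TypedDict, Optional

from langgraph.graph import StateGraph, START, END
from PIL import Image


class GraphState(TypedDict):
    query: str
    context: Optional[str]
    answer: Optional[str]


def retrieve(state: GraphState) -> dict:
    # Stand-in for a retrieval node that would call the retriever module.
    return {"context": f"documents for: {state['query']}"}


def generate(state: GraphState) -> dict:
    # Stand-in for a generation node that would call the generator module.
    return {"answer": f"answer grounded in: {state['context']}"}


builder = StateGraph(GraphState)
builder.add_node("retrieve", retrieve)
builder.add_node("generate", generate)
builder.add_edge(START, "retrieve")
builder.add_edge("retrieve", "generate")
builder.add_edge("generate", END)
graph = builder.compile()

# Same conversion as get_graph_visualization(): PNG bytes -> PIL Image.
# Note: draw_mermaid_png() renders via the mermaid.ink service by default,
# so it needs network access and can fail offline.
png_bytes = graph.get_graph().draw_mermaid_png()
image = Image.open(io.BytesIO(png_bytes))
image.save("workflow.png")
```

Since the workflow topology is fixed at startup, caching the rendered PNG once (rather than re-rendering on every "Refresh Graph" click) would avoid repeated calls to the Mermaid renderer; whether that matters here depends on how often the graph definition changes.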
requirements.txt CHANGED
@@ -1,4 +1,5 @@
 gradio[mcp]
 gradio_client>=1.0.0
 langgraph>=0.2.0
+Pillow>=9.0.0