import gradio as gr

from langgraph.graph import StateGraph, START, END

from modules.data_class import DataState
from modules.nodes import chatbot_with_tools, human_node, maybe_exit_human_node, maybe_route_to_tools
from modules.tools import data_node
# Define the LangGraph chatbot
graph_builder = StateGraph(DataState)

# Add nodes
graph_builder.add_node("chatbot_healthassistant", chatbot_with_tools)
graph_builder.add_node("patient", human_node)
graph_builder.add_node("documenting", data_node)

# Define edges
graph_builder.add_conditional_edges("chatbot_healthassistant", maybe_route_to_tools)
graph_builder.add_conditional_edges("patient", maybe_exit_human_node)
graph_builder.add_edge("documenting", "chatbot_healthassistant")
graph_builder.add_edge(START, "chatbot_healthassistant")

# Compile the graph
graph_with_order_tools = graph_builder.compile()
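
# Optional (a hedged sketch, not required by the running app): render the compiled
# graph's topology for inspection. draw_mermaid_png() is a LangGraph helper that may
# need extra rendering dependencies, so it is left commented out here.
# from IPython.display import Image, display
# display(Image(graph_with_order_tools.get_graph().draw_mermaid_png()))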
# Handle one conversational turn; gr.ChatInterface calls fn(message, history)
# and expects the bot reply back as a plain string.
def chat_interface(user_input, history):
    history = history or []
    # Build the initial state. How messages are represented (LangChain message
    # objects vs. simple role/content pairs) depends on modules.data_class.DataState.
    state = DataState(messages=list(history) + [("user", user_input)], data={}, finished=False)
    response = ""
    # stream() yields one {node_name: state_update} dict per executed node;
    # keep the most recent message produced by the graph.
    for step in graph_with_order_tools.stream(state):
        for update in step.values():
            if update and update.get("messages"):
                response = update["messages"][-1]
    return response.content if hasattr(response, "content") else str(response)
# Launch Gradio UI
iface = gr.ChatInterface(
    fn=chat_interface,
    title="LangGraph Chatbot",
    description="A chatbot powered by LangGraph and hosted on Hugging Face.",
    theme="soft",  # "compact" is a legacy Gradio theme name; "soft" is a current built-in
)
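
# On Hugging Face Spaces the default launch() settings are sufficient; when running
# locally, launch() also accepts options such as share=True or server_name/server_port.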
if __name__ == "__main__":
    iface.launch()