# app.py — Customer Support Assistant
# LangGraph workflow (categorize -> sentiment -> route -> respond/escalate)
# backed by a Groq-hosted LLM and served through a Gradio interface.
import os
from typing import Dict, TypedDict

from IPython.display import Image, display
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables.graph import MermaidDrawMethod
from langgraph.graph import END, StateGraph
class State(TypedDict):
    """Shared state flowing between LangGraph nodes.

    Each node returns a partial dict; LangGraph merges it into this state.
    """

    # Raw customer query text supplied by the caller.
    query: str
    # LLM-produced label, expected to be "Technical", "Billing", or "General".
    category: str
    # LLM-produced label, expected to be "Positive", "Neutral", or "Negative".
    sentiment: str
    # Final support answer returned to the user.
    response: str
from langchain_groq import ChatGroq

# SECURITY: the original hard-coded a Groq API key in source (now leaked —
# it must be revoked). Read the key from the environment instead; set
# GROQ_API_KEY before launching the app.
llm = ChatGroq(
    temperature=0,  # deterministic outputs make routing labels reproducible
    groq_api_key=os.environ["GROQ_API_KEY"],
    model_name="llama-3.3-70b-versatile",
)
# Removed the leftover notebook smoke test (`result = llm.invoke(...)` followed
# by a no-op `result.content`): it fired a billable LLM call on every import.
def categorize(state: State) -> State:
    """Classify the customer query as Technical, Billing, or General."""
    classification_prompt = ChatPromptTemplate.from_template(
        "Categorize the following customer query into one of these categories: "
        "Technical, Billing, General. Query: {query}"
    )
    label = (classification_prompt | llm).invoke({"query": state["query"]}).content
    return {"category": label}
def analyze_sentiment(state: State) -> State:
    """Label the query's sentiment as Positive, Neutral, or Negative.

    Fixes two prompt defects: the original string fragments concatenated with
    no separator ("...customer queryResponse with..."), and "Response with"
    should be the imperative "Respond with".
    """
    prompt = ChatPromptTemplate.from_template(
        "Analyze the sentiment of the following customer query. "
        "Respond with either 'Positive', 'Neutral', or 'Negative'. Query: {query}"
    )
    chain = prompt | llm
    sentiment = chain.invoke({"query": state["query"]}).content
    return {"sentiment": sentiment}
def handle_technical(state: State) -> State:
    """Draft a technical-support answer for the current query."""
    tech_prompt = ChatPromptTemplate.from_template(
        "Provide a technical support response to the following query : {query}"
    )
    reply = (tech_prompt | llm).invoke({"query": state["query"]})
    return {"response": reply.content}
def handle_billing(state: State) -> State:
    """Draft a billing-support answer for the current query."""
    billing_prompt = ChatPromptTemplate.from_template(
        "Provide a billing support response to the following query : {query}"
    )
    answer = (billing_prompt | llm).invoke({"query": state["query"]})
    return {"response": answer.content}
def handle_general(state: State) -> State:
    """Draft a general-support answer for the current query."""
    general_prompt = ChatPromptTemplate.from_template(
        "Provide a general support response to the following query : {query}"
    )
    message = (general_prompt | llm).invoke({"query": state["query"]})
    return {"response": message.content}
def escalate(state: "State") -> "State":
    """Hand the query off to a human agent (reached when sentiment is Negative).

    Fixes the grammar in the user-facing message: the original read
    "has been escalate"; it now reads "has been escalated".
    """
    return {"response": "This query has been escalated to a human agent due to its negative sentiment"}
def route_query(state: "State") -> str:
    """Return the name of the next node based on sentiment and category.

    Negative sentiment always escalates to a human; otherwise routing follows
    the category label. The original annotated the return type as State, but
    this function returns a node-name string for add_conditional_edges.
    LLM labels often carry trailing whitespace/newlines, so compare stripped
    values to avoid silently falling through to the general handler.
    """
    sentiment = state["sentiment"].strip()
    category = state["category"].strip()
    if sentiment == "Negative":
        return "escalate"
    if category == "Technical":
        return "handle_technical"
    if category == "Billing":
        return "handle_billing"
    return "handle_general"
# Assemble the LangGraph state machine:
#   categorize -> analyze_sentiment -> (route_query) -> handler -> END
workflow = StateGraph(State)
workflow.add_node("categorize", categorize)
workflow.add_node("analyze_sentiment", analyze_sentiment)
workflow.add_node("handle_technical", handle_technical)
workflow.add_node("handle_billing", handle_billing)
workflow.add_node("handle_general", handle_general)
workflow.add_node("escalate", escalate)
# Sentiment analysis always follows categorization.
workflow.add_edge("categorize", "analyze_sentiment")
# route_query returns one of the four node names below; map each to its node.
workflow.add_conditional_edges(
    "analyze_sentiment",
    route_query, {
        "handle_technical": "handle_technical",
        "handle_billing": "handle_billing",
        "handle_general": "handle_general",
        "escalate": "escalate"
    }
)
# Every terminal handler ends the graph run.
workflow.add_edge("handle_technical", END)
workflow.add_edge("handle_billing", END)
workflow.add_edge("handle_general", END)
workflow.add_edge("escalate", END)
workflow.set_entry_point("categorize")
# Compile into a runnable; invoked as app.invoke({"query": ...}).
app = workflow.compile()
def run_customer_support(query: str) -> Dict[str, str]:
    """Run the compiled workflow for one query; returns lowercase result keys.

    NOTE(review): this definition is dead code — it is shadowed by a later
    redefinition (same name, capitalized keys) further down the file, and only
    that later one is live at runtime. Consider deleting one of the two.
    """
    results = app.invoke({"query": query})
    return {
        "category": results['category'],
        "sentiment": results['sentiment'],
        "response": results['response']
    }
# query = "my laptop is not charging what should i do?"
# result = run_customer_support(query)
# print(f"Query: {query}")
# print(f"Category : {result['category']}")
# print(f"Sentiment : {result['sentiment']}")
# print(f"Response : {result['response']}")
import gradio as gr
# Define the function that integrates the workflow.
def run_customer_support(query: str) -> Dict[str, str]:
    """Invoke the compiled graph and repackage results with display-ready keys."""
    outcome = app.invoke({"query": query})
    return {
        label: outcome[field]
        for label, field in (
            ("Category", "category"),
            ("Sentiment", "sentiment"),
            ("Response", "response"),
        )
    }
# Create the Gradio interface
def gradio_interface(query: str):
    """Format the workflow result as one Markdown string for the Gradio output."""
    result = run_customer_support(query)
    sections = [
        f"**{field}:** {result[field]}"
        for field in ("Category", "Sentiment", "Response")
    ]
    return "\n\n".join(sections)
# Build the Gradio app
gui = gr.Interface(
    fn=gradio_interface,
    # Community theme fetched from the Hugging Face Hub — requires network access at startup.
    theme='Yntec/HaleyCH_Theme_Orange_Green',
    inputs=gr.Textbox(lines=2, placeholder="Enter your query here..."),
    outputs=gr.Markdown(),
    title="Customer Support Assistant",
    description="Provide a query and receive a categorized response. The system analyzes sentiment and routes to the appropriate support channel.",
)
# Launch the app
if __name__ == "__main__":
    # share=True opens a public tunnel URL — intended for demos; disable for private deployments.
    gui.launch(share=True)