Spaces:
Sleeping
File size: 838 Bytes
Commit: 5382e09
import gradio as gr
import os
from dotenv import load_dotenv
from mcp import StdioServerParameters
from smolagents import InferenceClientModel, CodeAgent, ToolCollection,MCPClient
# Initialised before the try block so the finally clause can safely call
# disconnect() even when MCPClient(...) itself raises — otherwise the
# NameError on `mcp_client` would mask the original startup exception.
mcp_client = None
try:
    # Load HF_TOKEN (and any other settings) from .env before anything reads it.
    load_dotenv()

    # Connect to the remote Gradio MCP server over SSE and fetch its tool list.
    mcp_client = MCPClient(
        {
            "url": "https://abidlabs-mcp-tool-http.hf.space/gradio_api/mcp/sse"
        }
    )
    tools = mcp_client.get_tools()

    # Build a code agent backed by the HF Inference API, equipped with the
    # tools exposed by the MCP server.
    model = InferenceClientModel(token=os.getenv("HF_TOKEN"))
    agent = CodeAgent(tools=[*tools], model=model)

    # Minimal chat UI: every user message is forwarded to the agent and the
    # agent's final answer (stringified) is shown as the reply.
    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        type='messages',
        examples=['prime factorization of 68'],
        title='agent with mcp server',
        description='this is a simple agent that uses MCP tools to answer questions'
    )
    demo.launch()
finally:
    # Close the MCP connection only if it was actually opened.
    if mcp_client is not None:
        mcp_client.disconnect()