import gradio as gr
import os

from mcp import StdioServerParameters  # unused here; needed for stdio-based MCP servers
from smolagents import InferenceClientModel, CodeAgent, ToolCollection, MCPClient

try:
    # Connect to a remote Gradio app that exposes its tools over MCP (SSE transport).
    mcp_client = MCPClient(
        {"url": "https://abidlabs-mcp-tool-http.hf.space/gradio_api/mcp/sse"}
    )
    tools = mcp_client.get_tools()

    # Hugging Face Inference model driving the agent; expects an HF_TOKEN secret.
    model = InferenceClientModel(token=os.getenv("HF_TOKEN"))
    agent = CodeAgent(tools=[*tools], model=model)

    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        type="messages",
        examples=["Prime factorization of 68"],
        title="Agent with MCP tool",
        description="This agent can use the MCP tool to answer questions.",
    )

    demo.launch()
finally:
    # Close the MCP connection even if launching the interface fails.
    mcp_client.disconnect()
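
The unused StdioServerParameters and ToolCollection imports point to the alternative setup where the MCP server runs locally over stdio instead of SSE. A minimal sketch of that variant, assuming a hypothetical local server.py that exposes MCP tools, could look like this:

import os

from mcp import StdioServerParameters
from smolagents import CodeAgent, InferenceClientModel, MCPClient

# Hypothetical local MCP server launched as a subprocess over stdio.
server_params = StdioServerParameters(command="python", args=["server.py"])

# The context manager connects, yields the tools, and disconnects on exit.
with MCPClient(server_params) as tools:
    model = InferenceClientModel(token=os.getenv("HF_TOKEN"))
    agent = CodeAgent(tools=[*tools], model=model)
    print(agent.run("Prime factorization of 68"))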