import os

import gradio as gr
from smolagents import CodeAgent, InferenceClientModel
from smolagents.mcp_client import MCPClient

# Hugging Face token used by the Inference API (set HF_TOKEN as a secret/env var).
token = os.environ.get("HF_TOKEN")

mcp_client = None
try:
    # Connect to a remote Gradio MCP server over SSE.
    mcp_client = MCPClient(
        # {"url": "https://abidlabs-mcp-tools.hf.space/gradio_api/mcp/sse"}
        {"url": "https://rajeshthangaraj1-mcp-sentiment.hf.space/gradio_api/mcp/sse"}
    )
    tools = mcp_client.get_tools()

    # Build a CodeAgent that can call the MCP server's tools.
    model = InferenceClientModel(token=token)
    agent = CodeAgent(tools=[*tools], model=model)

    def call_agent(message, history):
        """Run the agent on the user's message and return its answer as text."""
        return str(agent.run(message))

    demo = gr.ChatInterface(
        fn=call_agent,
        type="messages",
        examples=["Prime factorization of 68"],
        title="Agent with MCP Tools",
        description="This is a simple agent that uses MCP tools to answer questions.",
    )

    demo.launch()
finally:
    # Close the MCP connection when the app shuts down.
    if mcp_client is not None and hasattr(mcp_client, "close"):
        mcp_client.close()