import os

import gradio as gr
from smolagents import InferenceClientModel, CodeAgent, MCPClient

token = os.getenv("HF_TOKEN")
if token:
    print(f"Token set: {token[:3]}...")

# Connect to the MCP server created in the previous section.
# The client is created outside the try block so that `disconnect()` in
# `finally` only runs if the client was actually constructed.
mcp_client = MCPClient(
    {"url": "http://localhost:7860/gradio_api/mcp/sse"}
    # {"url": "https://ggsmith-mcp-sentiment.hf.space/gradio_api/mcp/sse"}
)

try:
    # Fetch the tools exposed by the MCP server and hand them to the agent.
    tools = mcp_client.get_tools()

    model = InferenceClientModel(token=token)
    agent = CodeAgent(tools=[*tools], model=model)

    # Wrap the agent in a simple Gradio chat UI.
    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        type="messages",
        examples=["Prime factorization of 68"],
        title="Agent with MCP Tools",
        description="This is a simple agent that uses MCP tools to answer questions.",
    )

    demo.launch()
finally:
    # Always close the connection to the MCP server.
    mcp_client.disconnect()
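# How to run this client (a sketch, with assumptions: the file is saved as
# app.py, and the MCP server from the previous section is already running
# locally on port 7860):
#
#     export HF_TOKEN=<your Hugging Face token>
#     python app.py
#
# Gradio should pick the next free port for the chat UI (for example 7861),
# since the MCP server already occupies 7860. To talk to the hosted Space
# instead of the local server, swap in the commented-out URL above.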