import os

import gradio as gr
from smolagents import InferenceClientModel, CodeAgent, MCPClient

# Confirm the Hugging Face token is available before starting.
token = os.getenv("HF_TOKEN")
if token:
    print(f"Token set: {token[:3]}...")

try:
    mcp_client = MCPClient(
        # MCP server created in the previous section
        {"url": "http://localhost:7860/gradio_api/mcp/sse"}
        # {"url": "https://ggsmith-mcp-sentiment.hf.space/gradio_api/mcp/sse"}
    )
    tools = mcp_client.get_tools()

    # Build an agent that can call the MCP tools through the Inference API.
    model = InferenceClientModel(token=token)
    agent = CodeAgent(tools=[*tools], model=model)

    # Expose the agent through a simple Gradio chat UI.
    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        type="messages",
        examples=["Prime factorization of 68"],
        title="Agent with MCP Tools",
        description="This is a simple agent that uses MCP tools to answer questions.",
    )

    demo.launch()
finally:
    # Always close the connection to the MCP server.
    mcp_client.disconnect()
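
As a variant, the connection can also be wrapped in a context manager so the client disconnects automatically, instead of the explicit try/finally above. This is a minimal sketch, assuming the installed smolagents release supports using MCPClient as a context manager that yields the tool list (check your version's documentation):

import gradio as gr
from smolagents import InferenceClientModel, CodeAgent, MCPClient

# Assumption: entering the context returns the list of tools and exiting it
# disconnects from the MCP server; verify against your smolagents version.
with MCPClient({"url": "http://localhost:7860/gradio_api/mcp/sse"}) as tools:
    agent = CodeAgent(tools=[*tools], model=InferenceClientModel())
    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        type="messages",
        title="Agent with MCP Tools",
    )
    demo.launch()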