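# Gradio chat app: a smolagents CodeAgent that calls MCP tools exposed by a
# Hugging Face Space over SSE (here, a Quran-chapters tool server).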
import gradio as gr
from smolagents import CodeAgent, InferenceClientModel, TransformersModel
from smolagents.mcp_client import MCPClient
# Initialize the MCP client and open the SSE connection
try:
    mcp_client = MCPClient(
        ## Try this working example on the hub:
        # {"url": "https://abidlabs-mcp-tools.hf.space/gradio_api/mcp/sse"}
        {"url": "https://captain-awesome-alquranchapters.hf.space/gradio_api/mcp/sse"}
    )
    # Expose the server's MCP tools as smolagents tools
    tools = mcp_client.get_tools()

    # Alternatively, use a hosted model via the Inference API:
    # model = InferenceClientModel()
    # Load a small model locally through smolagents' transformers wrapper.
    # Larger alternative: model_id="Qwen/Qwen2.5-Coder-32B-Instruct" (needs a big GPU)
    model = TransformersModel(
        model_id="unsloth/Llama-3.2-1B",
        device_map="auto",
        max_new_tokens=5000,
    )
    agent = CodeAgent(tools=[*tools], model=model)
    # Define the Gradio ChatInterface; each message is forwarded to the agent
    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        type="messages",
        title="Agent with MCP Tools",
        description="This is a simple agent that uses MCP tools to get chapters of the Quran.",
    )
    demo.launch(share=True)
finally:
    # Properly close the MCP client connection; guard against the case where
    # MCPClient() itself raised and mcp_client was never bound.
    if "mcp_client" in locals():
        mcp_client.disconnect()