|
import gradio as gr |
|
|
|
from mcp.client.stdio import StdioServerParameters |
|
from smolagents import InferenceClientModel, CodeAgent |
|
from smolagents.mcp_client import MCPClient |
|
from transformers import pipeline |
|
|
|
|
|
# Connect to a remote MCP server, hand its tools to a smolagents CodeAgent,
# and serve the agent through a Gradio chat UI. The MCP connection is always
# released on exit, including when startup fails partway through.
mcp_client = None  # initialized first so the finally block is safe if MCPClient() raises
try:
    mcp_client = MCPClient(
        # SSE endpoint of the Quran-chapters MCP server hosted on HF Spaces.
        {"url": "https://captain-awesome-alquranchapters.hf.space/gradio_api/mcp/sse"}
    )

    # Tool objects discovered from the MCP server, ready for agent use.
    tools = mcp_client.get_tools()

    # Fix: the original called AutoModelForCausalLM.from_pretrained with
    # torch.bfloat16, but neither `AutoModelForCausalLM` nor `torch` was
    # imported (NameError at runtime), and CodeAgent expects a smolagents
    # model wrapper rather than a raw transformers model. Use the already
    # imported InferenceClientModel with the same model id instead.
    model = InferenceClientModel(model_id="unsloth/Llama-3.2-1B")

    agent = CodeAgent(tools=[*tools], model=model)

    demo = gr.ChatInterface(
        # agent.run may return a non-string result; coerce for the chat UI.
        fn=lambda message, history: str(agent.run(message)),
        type="messages",
        title="Agent with MCP Tools",
        description="This is a simple agent that uses MCP tools to get chapters of the Quran.",
    )

    demo.launch(share=True)

finally:
    # Disconnect only if the client was actually created.
    if mcp_client is not None:
        mcp_client.disconnect()
|
|