# import gradio as gr
# from mcp.client.stdio import StdioServerParameters
# from smolagents import InferenceClientModel, CodeAgent
# from smolagents.mcp_client import MCPClient
# from transformers import pipeline
# from transformers import AutoModelForCausalLM, AutoTokenizer
# import torch
# # Initialize the MCP client correctly
# try:
#     mcp_client = MCPClient(
#         ## Try this working example on the hub:
#         # {"url": "https://abidlabs-mcp-tools.hf.space/gradio_api/mcp/sse"}
#         {"url": "https://captain-awesome-alquranchapters.hf.space/gradio_api/mcp/sse"}
#     )
#     tools = mcp_client.get_tools()
#     # model = InferenceClientModel()
#     # model = TransformersModel(
#     # model_id="Qwen/Qwen2.5-Coder-32B-Instruct",
#     # device="cuda",
#     # max_new_tokens=5000,
#     # )
#     model_id = "unsloth/Llama-3.2-1B"
#     model = AutoModelForCausalLM.from_pretrained(
#         model_id,
#         torch_dtype=torch.bfloat16,
#         device_map="auto"
#     )
#     agent = CodeAgent(tools=[*tools], model=model)
#     # Define Gradio ChatInterface
#     demo = gr.ChatInterface(
#         fn=lambda message, history: str(agent.run(message)),
#         type="messages",
#         title="Agent with MCP Tools",
#         description="This is a simple agent that uses MCP tools to get chapters of the Quran.",
#     )
#     demo.launch(share=True)
# finally:
#     # Properly close the MCP client connection
#     # if 'mcp_client' in locals():
#     #     mcp_client.disconnect()
#     mcp_client.disconnect()
import asyncio

import gradio as gr
import torch
from smolagents import CodeAgent
from smolagents.mcp_client import MCPClient
from transformers import AutoModelForCausalLM
async def main():
    """Serve a Gradio chat UI backed by a smolagents CodeAgent with MCP tools.

    Connects to a remote MCP server over SSE, loads its tools, builds a small
    local causal LM, and blocks in ``demo.launch`` until the server exits.
    The MCP connection is always released on the way out.

    NOTE(review): smolagents' MCPClient and CodeAgent.run are synchronous
    (the earlier working version of this script called them without await),
    so this coroutine contains no awaits. It stays ``async`` only so the
    existing ``asyncio.run(main())`` entry point keeps working.
    """
    # MCPClient establishes its connection on construction — there is no
    # separate connect() coroutine to await.
    mcp_client = MCPClient(
        {"url": "https://captain-awesome-alquranchapters.hf.space/gradio_api/mcp/sse"}
    )
    try:
        tools = mcp_client.get_tools()

        # Small open-weights model; device_map="auto" lets transformers put
        # it on GPU when one is available. bfloat16 halves the memory cost.
        model_id = "unsloth/Llama-3.2-1B"
        model = AutoModelForCausalLM.from_pretrained(
            model_id,
            torch_dtype=torch.bfloat16,
            device_map="auto",
        )

        agent = CodeAgent(tools=[*tools], model=model)

        def gradio_fn(message, history):
            # CodeAgent.run is synchronous; Gradio accepts sync callbacks.
            # history is required by the ChatInterface signature but unused.
            return str(agent.run(message))

        demo = gr.ChatInterface(
            fn=gradio_fn,
            type="messages",
            title="Agent with MCP Tools",
            description="This is a simple agent that uses MCP tools to get chapters of the Quran.",
        )
        demo.launch(share=True)
    finally:
        # Release the MCP connection even if model loading or launch() raises.
        mcp_client.disconnect()
if __name__ == "__main__":
    # main() is a coroutine, so drive it with the asyncio event loop.
    asyncio.run(main())