# (removed: extraction artifact — a file-size header, git-blame commit hashes,
#  and a line-number ruler that were not part of the original script)
# import gradio as gr
# from mcp.client.stdio import StdioServerParameters
# from smolagents import InferenceClientModel, CodeAgent
# from smolagents.mcp_client import MCPClient
# from transformers import pipeline
# from transformers import AutoModelForCausalLM, AutoTokenizer
# import torch
# # Initialize the MCP client correctly
# try:
# mcp_client = MCPClient(
# ## Try this working example on the hub:
# # {"url": "https://abidlabs-mcp-tools.hf.space/gradio_api/mcp/sse"}
# {"url": "https://captain-awesome-alquranchapters.hf.space/gradio_api/mcp/sse"}
# )
# tools = mcp_client.get_tools()
# # model = InferenceClientModel()
# # model = TransformersModel(
# # model_id="Qwen/Qwen2.5-Coder-32B-Instruct",
# # device="cuda",
# # max_new_tokens=5000,
# # )
# model_id = "unsloth/Llama-3.2-1B"
# model = AutoModelForCausalLM.from_pretrained(
# model_id,
# torch_dtype=torch.bfloat16,
# device_map="auto"
# )
# agent = CodeAgent(tools=[*tools], model=model)
# # Define Gradio ChatInterface
# demo = gr.ChatInterface(
# fn=lambda message, history: str(agent.run(message)),
# type="messages",
# title="Agent with MCP Tools",
# description="This is a simple agent that uses MCP tools to get chapters of the Quran.",
# )
# demo.launch(share=True)
# finally:
# # Properly close the MCP client connection
# # if 'mcp_client' in locals():
# # mcp_client.disconnect()
# mcp_client.disconnect()
import gradio as gr
import asyncio
from smolagents.mcp_client import MCPClient
from transformers import AutoModelForCausalLM
import torch
from mcp.client.stdio import StdioServerParameters
from smolagents import InferenceClientModel, CodeAgent, ToolCollection
# Connect to an MCP server over SSE, expose its tools to a smolagents
# CodeAgent, and serve the agent through a Gradio chat UI.
mcp_client = None  # pre-bind so the finally-block is safe if the constructor raises
try:
    mcp_client = MCPClient(
        ## Try this working example on the hub:
        # {"url": "https://abidlabs-mcp-tools.hf.space/gradio_api/mcp/sse"}
        {"url": "http://localhost:7860/gradio_api/mcp/sse"}
    )
    tools = mcp_client.get_tools()

    model_id = "unsloth/Llama-3.2-1B"
    model = AutoModelForCausalLM.from_pretrained(
        model_id,
        torch_dtype=torch.bfloat16,
        device_map="auto"
    )
    # NOTE(review): CodeAgent normally expects a smolagents model wrapper
    # (e.g. TransformersModel or InferenceClientModel, as the commented-out
    # version above used), not a raw AutoModelForCausalLM — confirm this
    # object implements the call interface CodeAgent relies on.
    agent = CodeAgent(tools=tools, model=model)

    # Gradio chat front-end: every user message is routed through the agent
    # and the agent's answer is rendered as the reply.
    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        type="messages",
        title="Agent with MCP Tools",
        description="This is a simple agent that uses MCP tools to get chapters of the Quran.",
    )
    demo.launch()
    # demo.launch(share=True)
finally:
    # Close the MCP connection on shutdown; the guard prevents a NameError
    # from masking the real exception when MCPClient() itself failed.
    if mcp_client is not None:
        mcp_client.disconnect()