Update app.py
Browse files
app.py
CHANGED
@@ -1,44 +1,87 @@
|
|
1 |
-
import gradio as gr
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2 |
|
3 |
-
|
4 |
-
|
|
|
|
|
|
|
5 |
from smolagents.mcp_client import MCPClient
|
6 |
-
from transformers import
|
7 |
-
from transformers import AutoModelForCausalLM, AutoTokenizer
|
8 |
import torch
|
9 |
|
10 |
-
|
11 |
-
|
12 |
-
mcp_client
|
13 |
-
|
14 |
-
|
15 |
-
{"url": "https://captain-awesome-alquranchapters.hf.space/gradio_api/mcp/sse"}
|
16 |
-
)
|
17 |
-
|
18 |
-
tools = mcp_client.get_tools()
|
19 |
-
|
20 |
-
# model = InferenceClientModel()
|
21 |
-
# model = TransformersModel(
|
22 |
-
# model_id="Qwen/Qwen2.5-Coder-32B-Instruct",
|
23 |
-
# device="cuda",
|
24 |
-
# max_new_tokens=5000,
|
25 |
-
# )
|
26 |
-
model_id = "unsloth/Llama-3.2-1B"
|
27 |
|
|
|
28 |
model = AutoModelForCausalLM.from_pretrained(
|
29 |
-
|
30 |
-
|
31 |
-
|
32 |
)
|
33 |
|
|
|
34 |
|
35 |
-
|
|
|
|
|
36 |
|
37 |
-
|
38 |
-
|
39 |
-
# Define Gradio ChatInterface
|
40 |
demo = gr.ChatInterface(
|
41 |
-
fn=
|
42 |
type="messages",
|
43 |
title="Agent with MCP Tools",
|
44 |
description="This is a simple agent that uses MCP tools to get chapters of the Quran.",
|
@@ -46,8 +89,7 @@ try:
|
|
46 |
|
47 |
demo.launch(share=True)
|
48 |
|
49 |
-
|
50 |
-
|
51 |
-
|
52 |
-
|
53 |
-
mcp_client.disconnect()
|
|
|
1 |
+
# import gradio as gr
|
2 |
+
|
3 |
+
# from mcp.client.stdio import StdioServerParameters
|
4 |
+
# from smolagents import InferenceClientModel, CodeAgent
|
5 |
+
# from smolagents.mcp_client import MCPClient
|
6 |
+
# from transformers import pipeline
|
7 |
+
# from transformers import AutoModelForCausalLM, AutoTokenizer
|
8 |
+
# import torch
|
9 |
+
|
10 |
+
# # Initialize the MCP client correctly
|
11 |
+
# try:
|
12 |
+
# mcp_client = MCPClient(
|
13 |
+
# ## Try this working example on the hub:
|
14 |
+
# # {"url": "https://abidlabs-mcp-tools.hf.space/gradio_api/mcp/sse"}
|
15 |
+
# {"url": "https://captain-awesome-alquranchapters.hf.space/gradio_api/mcp/sse"}
|
16 |
+
# )
|
17 |
+
|
18 |
+
# tools = mcp_client.get_tools()
|
19 |
+
|
20 |
+
# # model = InferenceClientModel()
|
21 |
+
# # model = TransformersModel(
|
22 |
+
# # model_id="Qwen/Qwen2.5-Coder-32B-Instruct",
|
23 |
+
# # device="cuda",
|
24 |
+
# # max_new_tokens=5000,
|
25 |
+
# # )
|
26 |
+
# model_id = "unsloth/Llama-3.2-1B"
|
27 |
+
|
28 |
+
# model = AutoModelForCausalLM.from_pretrained(
|
29 |
+
# model_id,
|
30 |
+
# torch_dtype=torch.bfloat16,
|
31 |
+
# device_map="auto"
|
32 |
+
# )
|
33 |
+
|
34 |
+
|
35 |
+
# agent = CodeAgent(tools=[*tools], model=model)
|
36 |
+
|
37 |
+
|
38 |
+
|
39 |
+
# # Define Gradio ChatInterface
|
40 |
+
# demo = gr.ChatInterface(
|
41 |
+
# fn=lambda message, history: str(agent.run(message)),
|
42 |
+
# type="messages",
|
43 |
+
# title="Agent with MCP Tools",
|
44 |
+
# description="This is a simple agent that uses MCP tools to get chapters of the Quran.",
|
45 |
+
# )
|
46 |
+
|
47 |
+
# demo.launch(share=True)
|
48 |
+
|
49 |
+
# finally:
|
50 |
+
# # Properly close the MCP client connection
|
51 |
+
# # if 'mcp_client' in locals():
|
52 |
+
# # mcp_client.disconnect()
|
53 |
+
# mcp_client.disconnect()
|
54 |
|
55 |
+
|
56 |
+
|
57 |
+
|
58 |
+
import asyncio

import gradio as gr
import torch
from smolagents import CodeAgent
from smolagents.mcp_client import MCPClient
from transformers import AutoModelForCausalLM
|
63 |
|
64 |
+
async def main():
    """Build a CodeAgent backed by remote MCP tools and serve it via a Gradio chat UI.

    Connects to the Quran-chapters MCP server, loads a local Llama model,
    wraps it in a smolagents CodeAgent, and launches a public Gradio
    ChatInterface. The MCP connection is always closed on exit.
    """
    # MCPClient connects in its constructor; there is no separate connect()
    # step, and get_tools()/disconnect() are synchronous in smolagents —
    # awaiting them (as the previous revision did) raises TypeError.
    mcp_client = MCPClient(
        {"url": "https://captain-awesome-alquranchapters.hf.space/gradio_api/mcp/sse"}
    )
    try:
        tools = mcp_client.get_tools()

        model_id = "unsloth/Llama-3.2-1B"
        model = AutoModelForCausalLM.from_pretrained(
            model_id,
            torch_dtype=torch.bfloat16,  # halve memory vs fp32; fine for inference
            device_map="auto",           # place layers on GPU/CPU automatically
        )

        agent = CodeAgent(tools=[*tools], model=model)

        def gradio_fn(message, history):
            # CodeAgent.run is synchronous; Gradio executes sync callbacks in
            # a worker thread, so no await is needed (or possible) here.
            return str(agent.run(message))

        # Define Gradio ChatInterface
        demo = gr.ChatInterface(
            fn=gradio_fn,
            type="messages",
            title="Agent with MCP Tools",
            description="This is a simple agent that uses MCP tools to get chapters of the Quran.",
        )

        demo.launch(share=True)
    finally:
        # Close the MCP connection even if launch() raises or is interrupted.
        mcp_client.disconnect()


if __name__ == "__main__":
    asyncio.run(main())
|
|