captain-awesome committed on
Commit 10a9d38 · verified · 1 Parent(s): b269a28

Update app.py

Files changed (1)
  1. app.py +55 -51
app.py CHANGED
@@ -55,71 +55,75 @@
 
 
 
-# import gradio as gr
-# import asyncio
-# from smolagents.mcp_client import MCPClient
-# from transformers import AutoModelForCausalLM
-# import torch
-# from mcp.client.stdio import StdioServerParameters
-# from smolagents import InferenceClientModel, CodeAgent, ToolCollection
-
-# try:
-#     mcp_client = MCPClient(
-#         ## Try this working example on the hub:
-#         # {"url": "https://abidlabs-mcp-tools.hf.space/gradio_api/mcp/sse"}
-#         {"url": "http://localhost:7860/gradio_api/mcp/sse"}
-#     )
-#     tools = mcp_client.get_tools()
-
-#     model_id = "unsloth/Llama-3.2-1B"
-#     model = AutoModelForCausalLM.from_pretrained(
-#         model_id,
-#         torch_dtype=torch.bfloat16,
-#         device_map="auto"
-#     )
-
-
-#     agent = CodeAgent(tools=tools, model=model)
-
-#     demo = gr.ChatInterface(
-#         fn=lambda message, history: str(agent.run(message)),
-#         type="messages",
-#         title="Agent with MCP Tools",
-#         description="This is a simple agent that uses MCP tools to get chapters of the Quran.",
-#     )
-
-
-#     demo.launch()
-
-#     # demo.launch(share=True)
-
-# finally:
-#     mcp_client.disconnect()
-
 import gradio as gr
-import os
-
-from smolagents import InferenceClientModel, CodeAgent, MCPClient
-
+import asyncio
+from smolagents.mcp_client import MCPClient
+from transformers import AutoModelForCausalLM
+import torch
+from mcp.client.stdio import StdioServerParameters
+from smolagents import InferenceClientModel, CodeAgent, ToolCollection
 
 try:
     mcp_client = MCPClient(
-        # {"url": "https://abidlabs-mcp-tool-http.hf.space/gradio_api/mcp/sse"}
-        {"url":"https://captain-awesome-alquranchapters.hf.space/gradio_api/mcp/sse"}
+        ## Try this working example on the hub:
+        # {"url": "https://abidlabs-mcp-tools.hf.space/gradio_api/mcp/sse"}
+        {"url": "https://captain-awesome-alquranchapters.hf.space/gradio_api/mcp/sse"}
     )
     tools = mcp_client.get_tools()
 
     model = InferenceClientModel(token=os.getenv("HUGGINGFACE_API_TOKEN"))
-    agent = CodeAgent(tools=[*tools], model=model, additional_authorized_imports=["json", "ast", "urllib", "base64"])
+
+    # model_id = "unsloth/Llama-3.2-1B"
+    # model = AutoModelForCausalLM.from_pretrained(
+    #     model_id,
+    #     torch_dtype=torch.bfloat16,
+    #     device_map="auto"
+    # )
+
+    # agent = CodeAgent(tools=tools, model=model)
 
     demo = gr.ChatInterface(
         fn=lambda message, history: str(agent.run(message)),
         type="messages",
-        examples=["Analyze the sentiment of the following text 'This is awesome'"],
         title="Agent with MCP Tools",
-        description="This is a simple agent that uses MCP tools to answer questions.",
+        description="This is a simple agent that uses MCP tools to get chapters of the Quran.",
     )
 
+
     demo.launch()
+
+    # demo.launch(share=True)
+
 finally:
-    mcp_client.disconnect()
+    mcp_client.disconnect()
+
+
+
+
+# import gradio as gr
+# import os
+
+# from smolagents import InferenceClientModel, CodeAgent, MCPClient
+
+
+# try:
+#     mcp_client = MCPClient(
+#         # {"url": "https://abidlabs-mcp-tool-http.hf.space/gradio_api/mcp/sse"}
+#         {"url":"https://captain-awesome-alquranchapters.hf.space/gradio_api/mcp/sse"}
+#     )
+#     tools = mcp_client.get_tools()
+
+#     model = InferenceClientModel(token=os.getenv("HUGGINGFACE_API_TOKEN"))
+#     agent = CodeAgent(tools=[*tools], model=model, additional_authorized_imports=["json", "ast", "urllib", "base64"])
+
+#     demo = gr.ChatInterface(
+#         fn=lambda message, history: str(agent.run(message)),
+#         type="messages",
+#         examples=["Analyze the sentiment of the following text 'This is awesome'"],
+#         title="Agent with MCP Tools",
+#         description="This is a simple agent that uses MCP tools to answer questions.",
+#     )
+
+#     demo.launch()
+# finally:
+#     mcp_client.disconnect()
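As far as this hunk shows, the committed version of app.py calls os.getenv("HUGGINGFACE_API_TOKEN") and agent.run(message) while both the import os line and the agent = CodeAgent(...) assignment stay commented out (anything above line 55 is outside the hunk, so they may be defined there). Below is a minimal sketch, not part of the commit, of how the same pieces could be wired together, assuming the commented-out CodeAgent call is the intended agent definition and that a HUGGINGFACE_API_TOKEN secret is available to the Space:

import os

import gradio as gr
from smolagents import CodeAgent, InferenceClientModel, MCPClient

try:
    # Connect to the Gradio MCP server exposed by the alquranchapters Space
    # and pull in the tools it advertises.
    mcp_client = MCPClient(
        {"url": "https://captain-awesome-alquranchapters.hf.space/gradio_api/mcp/sse"}
    )
    tools = mcp_client.get_tools()

    # Remote inference via the Hugging Face Inference API; the token is read
    # from the environment rather than hard-coded.
    model = InferenceClientModel(token=os.getenv("HUGGINGFACE_API_TOKEN"))
    agent = CodeAgent(tools=[*tools], model=model)

    # Chat UI whose callback forwards each message to the agent.
    demo = gr.ChatInterface(
        fn=lambda message, history: str(agent.run(message)),
        type="messages",
        title="Agent with MCP Tools",
        description="This is a simple agent that uses MCP tools to get chapters of the Quran.",
    )
    demo.launch()
finally:
    # Release the MCP connection even if agent setup or launch() fails.
    mcp_client.disconnect()

The try/finally mirrors the structure already in the file: whatever happens after the client is created, mcp_client.disconnect() still runs. The previous revision also passed additional_authorized_imports to CodeAgent; that argument is omitted here for brevity.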