captain-awesome committed on
Commit 96a8c30 · verified · 1 Parent(s): 8189c0c

Update app.py

Files changed (1)
  1. app.py +48 -18
app.py CHANGED
@@ -55,13 +55,53 @@
 
 
 
+# import gradio as gr
+# import asyncio
+# from smolagents.mcp_client import MCPClient
+# from transformers import AutoModelForCausalLM
+# import torch
+# from mcp.client.stdio import StdioServerParameters
+# from smolagents import InferenceClientModel, CodeAgent, ToolCollection
+
+# try:
+#     mcp_client = MCPClient(
+#         ## Try this working example on the hub:
+#         # {"url": "https://abidlabs-mcp-tools.hf.space/gradio_api/mcp/sse"}
+#         {"url": "http://localhost:7860/gradio_api/mcp/sse"}
+#     )
+#     tools = mcp_client.get_tools()
+
+#     model_id = "unsloth/Llama-3.2-1B"
+#     model = AutoModelForCausalLM.from_pretrained(
+#         model_id,
+#         torch_dtype=torch.bfloat16,
+#         device_map="auto"
+#     )
+
+
+#     agent = CodeAgent(tools=tools, model=model)
+
+#     demo = gr.ChatInterface(
+#         fn=lambda message, history: str(agent.run(message)),
+#         type="messages",
+#         title="Agent with MCP Tools",
+#         description="This is a simple agent that uses MCP tools to get chapters of the Quran.",
+#     )
+
+
+#     demo.launch()
+
+#     # demo.launch(share=True)
+
+# finally:
+#     mcp_client.disconnect()
+
 import gradio as gr
-import asyncio
-from smolagents.mcp_client import MCPClient
-from transformers import AutoModelForCausalLM
-import torch
+
 from mcp.client.stdio import StdioServerParameters
 from smolagents import InferenceClientModel, CodeAgent, ToolCollection
+from smolagents.mcp_client import MCPClient
+
 
 try:
     mcp_client = MCPClient(
@@ -71,27 +111,17 @@ try:
     )
     tools = mcp_client.get_tools()
 
-    model_id = "unsloth/Llama-3.2-1B"
-    model = AutoModelForCausalLM.from_pretrained(
-        model_id,
-        torch_dtype=torch.bfloat16,
-        device_map="auto"
-    )
-
-
-    agent = CodeAgent(tools=tools, model=model)
+    model = InferenceClientModel()
+    agent = CodeAgent(tools=[*tools], model=model)
 
     demo = gr.ChatInterface(
         fn=lambda message, history: str(agent.run(message)),
         type="messages",
+        examples=["Prime factorization of 68"],
         title="Agent with MCP Tools",
-        description="This is a simple agent that uses MCP tools to get chapters of the Quran.",
+        description="This is a simple agent that uses MCP tools to answer questions.",
     )
 
-
     demo.launch()
-
-    # demo.launch(share=True)
-
 finally:
     mcp_client.disconnect()
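
The updated client connects to an MCP endpoint at http://localhost:7860/gradio_api/mcp/sse. For context, below is a minimal sketch of a companion Gradio app that could expose a tool at that endpoint. It assumes a recent Gradio release where launch(mcp_server=True) is available; the prime_factorization function is only a hypothetical tool, chosen to match the "Prime factorization of 68" example prompt added in this commit.

# Hypothetical MCP server side (not part of this commit).
# Assumes a Gradio version with MCP support via launch(mcp_server=True).
import gradio as gr


def prime_factorization(n: int) -> str:
    """Return the prime factorization of a positive integer n as a string."""
    n = int(n)
    factors = []
    d = 2
    # Trial division: divide out each factor d while it divides n.
    while d * d <= n:
        while n % d == 0:
            factors.append(d)
            n //= d
        d += 1
    if n > 1:
        factors.append(n)
    return " * ".join(str(f) for f in factors) if factors else "1"


demo = gr.Interface(
    fn=prime_factorization,
    inputs=gr.Number(label="n", precision=0),
    outputs=gr.Textbox(label="Prime factors"),
    title="Prime Factorization Tool",
)

# mcp_server=True asks Gradio to also serve this function as an MCP tool
# under /gradio_api/mcp/sse on the same port the client in app.py targets.
demo.launch(mcp_server=True)

With such a server running on port 7860, the agent in app.py can discover the tool via mcp_client.get_tools() and answer the "Prime factorization of 68" example through CodeAgent.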