captain-awesome committed (verified)
Commit d9ecb54 · 1 parent: 88662e1

Update app.py

Files changed (1)
  1. app.py +6 -9
app.py CHANGED
@@ -97,27 +97,24 @@
 # mcp_client.disconnect()
 
 import gradio as gr
+import os
 
-from mcp.client.stdio import StdioServerParameters
-from smolagents import InferenceClientModel, CodeAgent, ToolCollection
-from smolagents.mcp_client import MCPClient
+from smolagents import InferenceClientModel, CodeAgent, MCPClient
 
 
 try:
     mcp_client = MCPClient(
-        ## Try this working example on the hub:
-        {"url": "https://abidlabs-mcp-tools.hf.space/gradio_api/mcp/sse"}
-        # {"url": "http://localhost:7860/gradio_api/mcp/sse"}
+        {"url": "https://abidlabs-mcp-tool-http.hf.space/gradio_api/mcp/sse"}
     )
     tools = mcp_client.get_tools()
 
-    model = InferenceClientModel()
-    agent = CodeAgent(tools=[*tools], model=model)
+    model = InferenceClientModel(token=os.getenv("HUGGINGFACE_API_TOKEN"))
+    agent = CodeAgent(tools=[*tools], model=model, additional_authorized_imports=["json", "ast", "urllib", "base64"])
 
     demo = gr.ChatInterface(
         fn=lambda message, history: str(agent.run(message)),
         type="messages",
-        examples=["Prime factorization of 68"],
+        examples=["Analyze the sentiment of the following text 'This is awesome'"],
         title="Agent with MCP Tools",
         description="This is a simple agent that uses MCP tools to answer questions.",
     )