largo committed · Commit 86b983b
1 Parent(s): 7e260ba

Deploy server

Files changed:
- .continue/mcpServers/playwright-mcp.yaml +8 -0
- .continue/models/llama-max.yaml +12 -0
- README.md +6 -0
- gradio-client.py +26 -0
.continue/mcpServers/playwright-mcp.yaml
ADDED
@@ -0,0 +1,8 @@
+name: Playwright mcpServer
+version: 0.0.1
+schema: v1
+mcpServers:
+  - name: Browser search
+    command: npx
+    args:
+      - "@playwright/mcp@latest"
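The config above only tells Continue to launch `npx @playwright/mcp@latest` as a stdio MCP server. As a rough way to verify that the same server starts and lists its tools outside the IDE, here is a minimal sketch using the `mcp` package from the README deps; the script itself is an assumption, not part of this commit:

```python
# Minimal sketch (not part of the commit): talk to the same Playwright MCP
# server over stdio using the `mcp` SDK listed in the README deps.
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

# Same command/args as .continue/mcpServers/playwright-mcp.yaml
server = StdioServerParameters(command="npx", args=["@playwright/mcp@latest"])


async def main():
    async with stdio_client(server) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            tools = await session.list_tools()
            print([t.name for t in tools.tools])


asyncio.run(main())
```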
.continue/models/llama-max.yaml
ADDED
@@ -0,0 +1,12 @@
+name: Ollama Llama model
+version: 0.0.1
+schema: v1
+models:
+  - provider: ollama
+    model: llama3.1:8b
+    defaultCompletionOptions:
+      contextLength: 128000
+    name: a llama3.1:8b max
+    roles:
+      - chat
+      - edit
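llama-max.yaml only declares the model for Continue; it does not start or verify anything. A quick sanity check that `llama3.1:8b` is actually served by Ollama, assuming the default `localhost:11434` endpoint and the `requests` package (neither is part of this commit):

```python
# Minimal sketch (not part of the commit): confirm the Ollama model referenced
# in llama-max.yaml responds on the default local endpoint.
# The localhost:11434 URL and the `requests` dependency are assumptions.
import requests

resp = requests.post(
    "http://localhost:11434/api/chat",
    json={
        "model": "llama3.1:8b",            # model from llama-max.yaml
        "messages": [{"role": "user", "content": "Say hello in one word."}],
        "stream": False,
        "options": {"num_ctx": 128000},    # mirrors contextLength in the config
    },
    timeout=120,
)
resp.raise_for_status()
print(resp.json()["message"]["content"])
```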
README.md
CHANGED
@@ -12,3 +12,9 @@ short_description: Unit 2 MCP course
 ---
 
 Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+
+# Deps
+
+```bash
+pip install "smolagents[mcp]" "gradio[mcp]" mcp fastmcp
+```
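The `gradio[mcp]` and `fastmcp` deps added above are what let a Space expose its own functions as MCP tools; the client below connects to exactly such a Space at `/gradio_api/mcp/sse`. A minimal sketch of that server side, with an illustrative `sentiment_analysis` function that is not the actual abidlabs Space this repo talks to:

```python
# Minimal sketch (not part of the commit): a Gradio app that exposes its
# function as an MCP tool via the gradio[mcp] extra installed above.
import gradio as gr


def sentiment_analysis(text: str) -> str:
    """Return a naive sentiment label for the given text."""
    positive = {"awesome", "great", "good", "love"}
    return "positive" if any(word in text.lower() for word in positive) else "neutral"


demo = gr.Interface(fn=sentiment_analysis, inputs="text", outputs="text")

# mcp_server=True serves the tool at /gradio_api/mcp/sse alongside the web UI.
demo.launch(mcp_server=True)
```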
gradio-client.py
ADDED
@@ -0,0 +1,26 @@
+import gradio as gr
+import os
+
+from smolagents import InferenceClientModel, CodeAgent, MCPClient
+
+
+try:
+    mcp_client = MCPClient(
+        {"url": "https://abidlabs-mcp-tool-http.hf.space/gradio_api/mcp/sse"}
+    )
+    tools = mcp_client.get_tools()
+
+    model = InferenceClientModel(token=os.getenv("HF_HUB_TOKEN"))
+    agent = CodeAgent(tools=[*tools], model=model, additional_authorized_imports=["json", "ast", "urllib", "base64"])
+
+    demo = gr.ChatInterface(
+        fn=lambda message, history: str(agent.run(message)),
+        type="messages",
+        examples=["Analyze the sentiment of the following text 'This is awesome'"],
+        title="Agent with MCP Tools",
+        description="This is a simple agent that uses MCP tools to answer questions.",
+    )
+
+    demo.launch()
+finally:
+    mcp_client.disconnect()
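gradio-client.py pins the agent to `InferenceClientModel` with an HF token. A hedged sketch of the same agent driven instead by the local Ollama model declared in llama-max.yaml, assuming smolagents' `LiteLLMModel` wrapper and the default Ollama endpoint; this is not what the commit deploys:

```python
# Minimal sketch (not part of the commit): the agent from gradio-client.py
# wired to the local Ollama model from llama-max.yaml instead of
# InferenceClientModel. LiteLLMModel, the ollama_chat/ prefix, and the
# num_ctx kwarg are assumptions based on the smolagents/LiteLLM integration.
from smolagents import CodeAgent, LiteLLMModel, MCPClient

mcp_client = MCPClient(
    {"url": "https://abidlabs-mcp-tool-http.hf.space/gradio_api/mcp/sse"}
)
try:
    tools = mcp_client.get_tools()
    model = LiteLLMModel(
        model_id="ollama_chat/llama3.1:8b",   # model from llama-max.yaml
        api_base="http://localhost:11434",    # assumed default Ollama endpoint
        num_ctx=128000,                       # mirrors contextLength in the config
    )
    agent = CodeAgent(tools=[*tools], model=model)
    print(agent.run("Analyze the sentiment of the following text 'This is awesome'"))
finally:
    mcp_client.disconnect()
```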