Upload 5 files
- examples/sandbox/__init__.py +0 -0
- examples/sandbox/mcp.json +88 -0
- examples/sandbox/run-inner-tool-test.py +120 -0
- examples/sandbox/run-test.py +59 -0
- examples/sandbox/test_inner_tool.py +76 -0
examples/sandbox/__init__.py
ADDED
File without changes
examples/sandbox/mcp.json
ADDED
@@ -0,0 +1,88 @@
+{
+  "mcpServers": {
+    "amap-amap-sse": {
+      "type": "sse",
+      "url": "https://mcp.amap.com/sse?key=${AMAP_AMAP_SSE_KEY}",
+      "timeout": 5.0,
+      "sse_read_timeout": 300.0
+    },
+    "tavily-mcp": {
+      "type": "stdio",
+      "command": "npx",
+      "args": ["-y", "[email protected]"],
+      "env": {
+        "TAVILY_API_KEY": "tvly-dev-"
+      }
+    },
+    "aworldsearch_server": {
+      "type": "function_tool"
+    },
+    "aworldsearch_server1": {
+      "command": "python",
+      "args": [
+        "-m",
+        "mcp_servers.aworldsearch_server"
+      ],
+      "env": {
+        "AWORLD_SEARCH_URL": "${AWORLD_SEARCH_URL}",
+        "AWORLD_SEARCH_TOTAL_NUM": "${AWORLD_SEARCH_TOTAL_NUM}",
+        "AWORLD_SEARCH_SLICE_NUM": "${AWORLD_SEARCH_SLICE_NUM}",
+        "AWORLD_SEARCH_DOMAIN": "${AWORLD_SEARCH_DOMAIN}",
+        "AWORLD_SEARCH_SEARCHMODE": "${AWORLD_SEARCH_SEARCHMODE}",
+        "AWORLD_SEARCH_SOURCE": "${AWORLD_SEARCH_SOURCE}",
+        "AWORLD_SEARCH_UID": "${AWORLD_SEARCH_UID}"
+      }
+    },
+    "picsearch_server": {
+      "command": "python",
+      "args": [
+        "-m",
+        "mcp_servers.picsearch_server"
+      ],
+      "env": {
+        "PIC_SEARCH_URL": "${PIC_SEARCH_URL}",
+        "PIC_SEARCH_TOTAL_NUM": "${PIC_SEARCH_TOTAL_NUM}",
+        "PIC_SEARCH_SLICE_NUM": "${PIC_SEARCH_SLICE_NUM}",
+        "PIC_SEARCH_DOMAIN": "${PIC_SEARCH_DOMAIN}",
+        "PIC_SEARCH_SEARCHMODE": "${PIC_SEARCH_SEARCHMODE}",
+        "PIC_SEARCH_SOURCE": "${PIC_SEARCH_SOURCE}"
+      }
+    },
+    "gen_audio_server": {
+      "command": "python",
+      "args": [
+        "-m",
+        "mcp_servers.gen_audio_server"
+      ],
+      "env": {
+        "AUDIO_TASK_URL": "${AUDIO_TASK_URL}",
+        "AUDIO_QUERY_URL": "${AUDIO_QUERY_URL}",
+        "AUDIO_APP_KEY": "${AUDIO_APP_KEY}",
+        "AUDIO_SECRET": "${AUDIO_SECRET}",
+        "AUDIO_SAMPLE_RATE": "${AUDIO_SAMPLE_RATE}",
+        "AUDIO_AUDIO_FORMAT": "${AUDIO_AUDIO_FORMAT}",
+        "AUDIO_TTS_VOICE": "${AUDIO_TTS_VOICE}",
+        "AUDIO_TTS_SPEECH_RATE": "${AUDIO_TTS_SPEECH_RATE}",
+        "AUDIO_TTS_VOLUME": "${AUDIO_TTS_VOLUME}",
+        "AUDIO_TTS_PITCH": "${AUDIO_TTS_PITCH}",
+        "AUDIO_VOICE_TYPE": "${AUDIO_VOICE_TYPE}"
+      }
+    },
+    "gen_video_server": {
+      "command": "python",
+      "args": [
+        "-m",
+        "mcp_servers.gen_video_server"
+      ],
+      "env": {
+        "DASHSCOPE_API_KEY": "${DASHSCOPE_API_KEY}",
+        "DASHSCOPE_VIDEO_SUBMIT_URL": "${DASHSCOPE_VIDEO_SUBMIT_URL}",
+        "DASHSCOPE_QUERY_BASE_URL": "${DASHSCOPE_QUERY_BASE_URL}",
+        "DASHSCOPE_VIDEO_MODEL": "${DASHSCOPE_VIDEO_MODEL}",
+        "DASHSCOPE_VIDEO_SIZE": "${DASHSCOPE_VIDEO_SIZE}",
+        "DASHSCOPE_VIDEO_SLEEP_TIME": "${DASHSCOPE_VIDEO_SLEEP_TIME}",
+        "DASHSCOPE_VIDEO_RETRY_TIMES": "${DASHSCOPE_VIDEO_RETRY_TIMES}"
+      }
+    }
+  }
+}
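Note: every server entry above passes its credentials and endpoints through ${VAR} placeholders. AWorld's MCP loading most likely resolves these itself, so treat the following as an optional, hedged sketch only: a standard-library helper (the name expand_env is hypothetical, not part of this repository) that substitutes the placeholders from the process environment before the config is handed to an Agent.

# Minimal sketch (assumption): expand ${VAR} placeholders in mcp.json from the
# process environment before passing the config to an Agent. AWorld may already
# do this internally; the helper name expand_env is hypothetical.
import json
import os
import re

def expand_env(value):
    """Recursively replace ${VAR} with os.environ.get("VAR", "") in strings."""
    if isinstance(value, str):
        return re.sub(r"\$\{(\w+)\}", lambda m: os.environ.get(m.group(1), ""), value)
    if isinstance(value, dict):
        return {k: expand_env(v) for k, v in value.items()}
    if isinstance(value, list):
        return [expand_env(v) for v in value]
    return value

with open("examples/sandbox/mcp.json", "r") as f:
    mcp_config = expand_env(json.load(f))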
examples/sandbox/run-inner-tool-test.py
ADDED
@@ -0,0 +1,120 @@
+# coding: utf-8
+# Copyright (c) 2025 inclusionAI.
+import asyncio
+import json
+import logging
+import os
+
+from dotenv import load_dotenv
+
+from aworld.agents.llm_agent import Agent
+from aworld.config.conf import AgentConfig, TaskConfig
+from aworld.core.task import Task
+
+from aworld.runner import Runners
+from aworld.runners.callback.decorator import reg_callback
+from aworld.tools.mcp_tool import async_mcp_tool
+from examples.sandbox.inner_tools import aworldsearch_function_tools
+
+
+
+
+@reg_callback("print_content")
+def simple_callback(content):
+    """Simple callback function, prints content and returns it
+
+    Args:
+        content: Content to print
+
+    Returns:
+        The input content
+    """
+    print(f"callback content: {content}")
+    return content
+
+async def run():
+    load_dotenv()
+    llm_provider = os.getenv("LLM_PROVIDER_WEATHER", "openai")
+    llm_model_name = os.getenv("LLM_MODEL_NAME_WEATHER")
+    llm_api_key = os.getenv("LLM_API_KEY_WEATHER")
+    llm_base_url = os.getenv("LLM_BASE_URL_WEATHER")
+    llm_temperature = os.getenv("LLM_TEMPERATURE_WEATHER", 0.0)
+
+    agent_config = AgentConfig(
+        llm_provider=llm_provider,
+        llm_model_name=llm_model_name,
+        llm_api_key=llm_api_key,
+        llm_base_url=llm_base_url,
+        llm_temperature=llm_temperature,
+    )
+    # mcp_servers = ["filewrite_server", "fileread_server"]
+    # mcp_servers = ["amap-amap-sse", "filewrite_server", "fileread_server"]
+    # mcp_servers = ["file_server"]
+    # mcp_servers = ["amap-amap-sse"]
+    mcp_servers = ["aworldsearch_server"]
+    # mcp_servers = ["gen_video_server"]
+    # mcp_servers = ["picsearch_server"]
+    # mcp_servers = ["gen_audio_server"]
+    # mcp_servers = ["playwright"]
+    # mcp_servers = ["tavily-mcp"]
+
+    path_cwd = os.path.dirname(os.path.abspath(__file__))
+    mcp_path = os.path.join(path_cwd, "mcp.json")
+    with open(mcp_path, "r") as f:
+        mcp_config = json.load(f)
+
+    print("-------------------mcp_config--------------", mcp_config)
+
+    # sand_box = Sandbox(mcp_servers=mcp_servers, mcp_config=mcp_config)
+    # You can also specify the sandbox environment type:
+    # sand_box = Sandbox(mcp_servers=mcp_servers, mcp_config=mcp_config, env_type=SandboxEnvType.K8S)
+    # sand_box = Sandbox(mcp_servers=mcp_servers, mcp_config=mcp_config, env_type=SandboxEnvType.SUPERCOMPUTER)
+
+    search_sys_prompt = "You are a versatile assistant"
+    search = Agent(
+        conf=agent_config,
+        name="search_agent",
+        system_prompt=search_sys_prompt,
+        mcp_config=mcp_config,
+        mcp_servers=mcp_servers,
+        # sandbox=sand_box,
+    )
+
+    # Run agent
+    # Runners.sync_run(input="Use tavily-mcp to check what tourist attractions are in Hangzhou", agent=search)
+    task = Task(
+        # input="Use tavily-mcp to check what tourist attractions are in Hangzhou",
+        # input="Use the file_server tool to analyze this audio link: https://amap-aibox-data.oss-cn-zhangjiakou.aliyuncs.com/.mp3",
+        # input="Use the amap-amap-sse tool to find hotels within one kilometer of West Lake in Hangzhou",
+        input="Use the aworldsearch_server tool to search for the origin of the Dragon Boat Festival",
+        # input="Use the picsearch_server tool to search for Captain America",
+        # input="Make sure to use the human_confirm tool to let the user confirm this message: 'Do you want to make a payment to this customer'",
+        # input="Use the gen_audio_server tool to convert this sentence to audio: 'Nice to meet you'",
+        # input="Use the gen_video_server tool to generate a video of this description: 'A cat walking alone on a snowy day'",
+        # input="How's the weather in New York, Shanghai, and Beijing right now? These are three cities, I hope the large model returns three tools when it identifies tool calls",
+        # input="First call the filewrite_server tool, then call the fileread_server tool",
+        # input="Use the playwright tool, with Google browser, search for the latest news about the Trump administration on www.baidu.com",
+        # input="Use tavily-mcp",
+        agent=search,
+        conf=TaskConfig(),
+        event_driven=True
+    )
+
+    # result = Runners.sync_run_task(task)
+    # result = Runners.sync_run_task(task)
+    # result = await Runners.streamed_run_task(task)
+    # result = await Runners.run_task(task)
+    # print(
+    #     "----------------------------------------------------------------------------------------------"
+    # )
+    # print(result)
+    # async for chunk in Runners.streamed_run_task(task).stream_events():
+    #     print(chunk, end="", flush=True)
+
+    async for output in Runners.streamed_run_task(task).stream_events():
+        print(f"Agent Output: {output}")
+
+
+
+if __name__ == "__main__":
+    asyncio.run(run())
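This script (like run-test.py below) calls load_dotenv() and reads its LLM settings from LLM_*_WEATHER environment variables. A minimal .env sketch for a local run follows; the provider, model name, and base URL shown are illustrative placeholders, not values taken from this repository.

# .env (placeholder values, not real credentials)
LLM_PROVIDER_WEATHER=openai
LLM_MODEL_NAME_WEATHER=gpt-4o
LLM_API_KEY_WEATHER=sk-your-key-here
LLM_BASE_URL_WEATHER=https://api.openai.com/v1
LLM_TEMPERATURE_WEATHER=0.0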
examples/sandbox/run-test.py
ADDED
@@ -0,0 +1,59 @@
+# coding: utf-8
+# Copyright (c) 2025 inclusionAI.
+import asyncio
+import json
+import os
+
+from dotenv import load_dotenv
+
+from aworld.config.conf import AgentConfig, TaskConfig
+from aworld.agents.llm_agent import Agent
+from aworld.core.task import Task
+from aworld.runner import Runners
+
+
+async def run():
+    load_dotenv()
+    llm_provider = os.getenv("LLM_PROVIDER_WEATHER", "openai")
+    llm_model_name = os.getenv("LLM_MODEL_NAME_WEATHER")
+    llm_api_key = os.getenv("LLM_API_KEY_WEATHER")
+    llm_base_url = os.getenv("LLM_BASE_URL_WEATHER")
+    llm_temperature = os.getenv("LLM_TEMPERATURE_WEATHER", 0.0)
+
+    agent_config = AgentConfig(
+        llm_provider=llm_provider,
+        llm_model_name=llm_model_name,
+        llm_api_key=llm_api_key,
+        llm_base_url=llm_base_url,
+        llm_temperature=llm_temperature,
+    )
+    mcp_servers = ["tavily-mcp"]
+
+    path_cwd = os.path.dirname(os.path.abspath(__file__))
+    mcp_path = os.path.join(path_cwd, "mcp.json")
+    with open(mcp_path, "r") as f:
+        mcp_config = json.load(f)
+
+    search_sys_prompt = "You are a versatile assistant"
+    search = Agent(
+        conf=agent_config,
+        name="search_agent",
+        system_prompt=search_sys_prompt,
+        mcp_config=mcp_config,
+        mcp_servers=mcp_servers,
+    )
+
+    # Run agent
+    task = Task(
+        input="Use tavily-mcp to check what tourist attractions are in Hangzhou",
+        agent=search,
+        conf=TaskConfig(),
+    )
+
+    result = Runners.sync_run_task(task)
+    print("----------------------------------------------------------------------------------------------")
+    print(result)
+
+
+if __name__ == "__main__":
+    asyncio.run(run())
examples/sandbox/test_inner_tool.py
ADDED
@@ -0,0 +1,76 @@
+# coding: utf-8
+# Copyright (c) 2025 inclusionAI.
+
+import logging
+from aworld.tools import get_function_tools, list_function_tools
+from examples.sandbox.inner_tools import aworldsearch_function_tools, another_function_tools
+
+aworldsearch_server = get_function_tools("aworldsearch_server")
+
+print(aworldsearch_server.list_tools())
+res = aworldsearch_server.call_tool("search", {"query_list": ["Tencent financial report", "Baidu financial report", "Alibaba financial report"]})
+print(res)
+
+another_server = get_function_tools("another-server")
+print(another_server.list_tools())
+
+
+# Configure logging
+logging.basicConfig(
+    level=logging.INFO,
+    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+)
+
+# Step 1: Import different modules, which will automatically register their respective FunctionTools instances
+print("=== Step 1: Import modules, automatically register FunctionTools instances ===")
+# Importing the aworldsearch_function_tools module registers "aworldsearch-server"
+print("Imported aworldsearch_function_tools module")
+
+# Importing the another_function_tools module registers "another-server"
+
+print("Imported another_function_tools module")
+
+# Step 2: Get FunctionTools instances by name
+print("\n=== Step 2: Get FunctionTools instances by name ===")
+from aworld.tools import get_function_tools, list_function_tools
+
+# List all registered FunctionTools servers
+print(f"All registered servers: {list_function_tools()}")
+
+# Get a server instance by its specific name
+aworldsearch_server = get_function_tools("aworldsearch-server")
+print(f"Retrieved server: {aworldsearch_server.name}")
+print(f"Server description: {aworldsearch_server.description}")
+
+another_server = get_function_tools("another-server")
+print(f"Retrieved server: {another_server.name}")
+print(f"Server description: {another_server.description}")
+
+# Step 3: Use the retrieved instances to call methods
+print("\n=== Step 3: Use the retrieved instances to call methods ===")
+# List all tools of the aworldsearch server
+print("aworldsearch-server tool list:")
+for tool in aworldsearch_server.list_tools():
+    print(f"  - {tool.name}: {tool.description}")
+
+# List all tools of the another server
+print("\nanother-server tool list:")
+for tool in another_server.list_tools():
+    print(f"  - {tool.name}: {tool.description}")
+
+# Step 4: Call tools
+print("\n=== Step 4: Call tool examples ===")
+# Call a tool on the aworldsearch server
+if "demo_search" in [tool.name for tool in aworldsearch_server.list_tools()]:
+    print("Calling demo_search tool:")
+    result = aworldsearch_server.call_tool("demo_search", {"query_list": ["Test query"]})
+    print(result)
+
+# Call a tool on the another server
+if "get_weather" in [tool.name for tool in another_server.list_tools()]:
+    print("\nCalling get_weather tool:")
+    result = another_server.call_tool("get_weather", {"city": "Beijing"})
+    print(result)
+
+if __name__ == "__main__":
+    pass  # Main logic has already been executed at the module level