Commit 3f3cebf
Parent(s): 6e0834d

Improve error handling and adjust height

Signed-off-by: Aivin V. Solatorio <[email protected]>

mcp_remote_client.py  CHANGED  (+88 -45)
@@ -69,54 +69,70 @@ class MCPClientWrapper:
         self.tools = []

     async def connect(self, server_path_or_url: str) -> str:
+        try:
+            # If there's an existing session, close it
+            if self.exit_stack:
+                await self.exit_stack.aclose()

+            self.exit_stack = AsyncExitStack()

+            if server_path_or_url.endswith(".py"):
+                command = "python"

+                server_params = StdioServerParameters(
+                    command=command,
+                    args=[server_path_or_url],
+                    env={"PYTHONIOENCODING": "utf-8", "PYTHONUNBUFFERED": "1"},
+                )

+                print(
+                    f"Starting MCP server with command: {command} {server_path_or_url}"
+                )
+                # Launch MCP subprocess and bind streams on the current running loop
+                stdio_transport = await self.exit_stack.enter_async_context(
+                    stdio_client(server_params)
                 )
+                self.stdio, self.write = stdio_transport
+            else:
+                print(f"Connecting to MCP server at: {server_path_or_url}")
+                sse_transport = await self.exit_stack.enter_async_context(
+                    sse_client(
+                        server_path_or_url,
+                        headers={"Authorization": f"Bearer {os.getenv('HF_TOKEN')}"},
+                    )
+                )
+                self.stdio, self.write = sse_transport
+
+            print("Creating MCP client session...")
+            # Create ClientSession on this same loop
+            self.session = await self.exit_stack.enter_async_context(
+                ClientSession(self.stdio, self.write)
             )
+            await self.session.initialize()
+            print("MCP session initialized successfully")

+            response = await self.session.list_tools()
+            self.tools = [
+                {
+                    "name": tool.name,
+                    "description": tool.description,
+                    "input_schema": tool.inputSchema,
+                }
+                for tool in response.tools
+            ]
+
+            print("Available tools:", self.tools)
+            tool_names = [tool["name"] for tool in self.tools]
+            return f"Connected to MCP server. Available tools: {', '.join(tool_names)}"
+        except Exception as e:
+            error_msg = f"Failed to connect to MCP server: {str(e)}"
+            print(error_msg)
+            # Clean up on error
+            if self.exit_stack:
+                await self.exit_stack.aclose()
+            self.exit_stack = None
+            self.session = None
+            return error_msg

     async def process_message(
         self, message: str, history: List[Union[Dict[str, Any], ChatMessage]]
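For context, the stdio branch above follows the standard MCP Python SDK connect pattern (transport context manager, then ClientSession, then initialize and list_tools). A minimal standalone sketch of that flow, assuming a local server.py that speaks MCP over stdio (not part of this commit):

    import asyncio
    from contextlib import AsyncExitStack

    from mcp import ClientSession, StdioServerParameters
    from mcp.client.stdio import stdio_client


    async def main() -> None:
        async with AsyncExitStack() as stack:
            # Hypothetical local MCP server script; swap in any stdio MCP server.
            params = StdioServerParameters(command="python", args=["server.py"])
            read, write = await stack.enter_async_context(stdio_client(params))
            session = await stack.enter_async_context(ClientSession(read, write))
            await session.initialize()
            tools = await session.list_tools()
            print("Available tools:", [tool.name for tool in tools.tools])


    asyncio.run(main())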
@@ -239,7 +255,32 @@ class MCPClientWrapper:
                 yield [result_messages[-1]]

                 print(f"Calling tool: {tool_name} with args: {tool_args}")
+                try:
+                    # Check if session is still valid
+                    if not self.session or not self.stdio or not self.write:
+                        raise Exception(
+                            "MCP session is not connected or has been closed"
+                        )
+
+                    result = await self.session.call_tool(tool_name, tool_args)
+                except Exception as e:
+                    error_msg = f"Error calling tool {tool_name}: {str(e)}"
+                    print(error_msg)
+                    result_messages.append(
+                        {
+                            "role": "assistant",
+                            "content": f"Sorry, I encountered an error while calling the tool: {error_msg}. Please try again.",
+                            "metadata": {
+                                "title": f"Tool Error for {tool_name.replace('avsolatorio_test_data_mcp_server', '')}",
+                                "status": "error",
+                                "id": f"error_{tool_name}",
+                            },
+                        }
+                    )
+                    partial_messages.append(result_messages[-1])
+                    yield [result_messages[-1]]
+                    partial_messages = []
+                    continue

                 if result_messages and "metadata" in result_messages[-2]:
                     result_messages[-2]["metadata"]["status"] = "done"
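The same guard-then-call pattern, shown in isolation as a rough sketch assuming an already-initialized mcp.ClientSession; the helper name and the text-extraction step are illustrative, not from the commit:

    from typing import Any, Dict, Optional

    from mcp import ClientSession


    async def call_tool_safely(
        session: Optional[ClientSession], tool_name: str, tool_args: Dict[str, Any]
    ) -> str:
        # Mirror the diff: fail soft with a message instead of crashing the chat loop.
        if session is None:
            return f"Error calling tool {tool_name}: MCP session is not connected"
        try:
            result = await session.call_tool(tool_name, tool_args)
        except Exception as e:
            return f"Error calling tool {tool_name}: {e}"
        # CallToolResult.content is a list of content blocks; text blocks carry .text.
        return "\n".join(block.text for block in result.content if hasattr(block, "text"))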
@@ -360,7 +401,7 @@ def gradio_interface(

         chatbot = gr.Chatbot(
             value=[],
-            height=
+            height="75vh",
             type="messages",
             show_copy_button=True,
             avatar_images=("img/small-user.png", "img/small-robot.png"),
@@ -375,9 +416,11 @@ def gradio_interface(
         )
         clear_btn = gr.Button("Clear Chat", scale=1)

-        connect_btn.click(client.connect, inputs=server_path, outputs=status)
+        # connect_btn.click(client.connect, inputs=server_path, outputs=status)
         # Automatically call client.connect(...) as soon as the interface loads
-        demo.load(
+        demo.load(
+            fn=client.connect, inputs=server_path, outputs=status, show_progress="full"
+        )

         msg.submit(client.process_message, [msg, chatbot], [chatbot, msg])
         clear_btn.click(lambda: [], None, chatbot)
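Wiring-wise, the last two hunks move the connect call from a button click to a page-load event and give the chat area a viewport-relative height. A minimal Gradio sketch of that pattern; the connect stub and component values below are placeholders, not from the commit:

    import gradio as gr


    async def connect(server_url: str) -> str:
        # Stand-in for MCPClientWrapper.connect; just echoes the target URL.
        return f"Connected to {server_url}"


    with gr.Blocks() as demo:
        server_path = gr.Textbox(value="https://example.com/mcp/sse", label="MCP server")
        status = gr.Textbox(label="Connection status")
        chatbot = gr.Chatbot(value=[], height="75vh", type="messages")
        # Runs once when the page loads, replacing the old Connect-button handler.
        demo.load(fn=connect, inputs=server_path, outputs=status, show_progress="full")

    demo.launch()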