Vladt-Tempest committed on
Commit
93f0a5a
·
1 Parent(s): 4c1ada1

Working Basic

Browse files
Files changed (4) hide show
  1. README.md +0 -1
  2. app.py +236 -0
  3. mcp_server.py +58 -0
  4. requirements.txt +3 -0
README.md CHANGED
@@ -10,4 +10,3 @@ pinned: false
10
  license: apache-2.0
11
  ---
12
 
13
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
10
  license: apache-2.0
11
  ---
12
 
 
app.py ADDED
@@ -0,0 +1,236 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import os
3
+ import json
4
+ from typing import List, Dict, Any, Union
5
+ from contextlib import AsyncExitStack
6
+
7
+ import gradio as gr
8
+ from gradio.components.chatbot import ChatMessage
9
+ from mcp import ClientSession, StdioServerParameters
10
+ from mcp.client.stdio import stdio_client
11
+ from anthropic import Anthropic
12
+
13
+
14
# Module-level event loop shared by every synchronous wrapper below; Gradio
# callbacks are sync, so they drive async MCP calls via run_until_complete.
# NOTE(review): a single global loop assumes callbacks never run concurrently
# — confirm before enabling Gradio queuing/concurrency.
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
16
+
17
class MCPClientWrapper:
    """Synchronous facade over an MCP stdio client plus the Anthropic API.

    Gradio callbacks are synchronous, so the public methods drive the
    module-level asyncio ``loop`` with ``run_until_complete``.
    """

    def __init__(self):
        # Active MCP ClientSession, or None until connect() succeeds.
        self.session = None
        # AsyncExitStack owning the stdio transport + session lifetimes.
        self.exit_stack = None
        # Anthropic() reads ANTHROPIC_API_KEY from the environment.
        self.anthropic = Anthropic()
        # Tool specs (name/description/input_schema) advertised to Claude.
        self.tools = []

    def connect(self, server_path: str) -> str:
        """Connect to the MCP server script at *server_path*; return a status string."""
        return loop.run_until_complete(self._connect(server_path))

    async def _connect(self, server_path: str) -> str:
        """Spawn the server subprocess, initialize a session, and list its tools."""
        # Tear down any previous connection before opening a new one.
        if self.exit_stack:
            await self.exit_stack.aclose()

        self.exit_stack = AsyncExitStack()

        # Pick the interpreter from the extension; anything that is not a
        # .py file is assumed to be a Node.js script.
        is_python = server_path.endswith('.py')
        command = "python" if is_python else "node"

        server_params = StdioServerParameters(
            command=command,
            args=[server_path],
            # Force UTF-8, unbuffered output so the stdio framing is not
            # delayed or mangled by the child's locale settings.
            env={"PYTHONIOENCODING": "utf-8", "PYTHONUNBUFFERED": "1"}
        )

        stdio_transport = await self.exit_stack.enter_async_context(stdio_client(server_params))
        self.stdio, self.write = stdio_transport

        self.session = await self.exit_stack.enter_async_context(ClientSession(self.stdio, self.write))
        await self.session.initialize()

        response = await self.session.list_tools()
        self.tools = [{
            "name": tool.name,
            "description": tool.description,
            "input_schema": tool.inputSchema
        } for tool in response.tools]

        tool_names = [tool["name"] for tool in self.tools]
        return f"Connected to MCP server. Available tools: {', '.join(tool_names)}"

    def process_message(self, message: str, history: List[Union[Dict[str, Any], ChatMessage]]) -> tuple:
        """Handle one user turn; return (updated history, cleared input textbox)."""
        if not self.session:
            return history + [
                {"role": "user", "content": message},
                {"role": "assistant", "content": "Please connect to an MCP server first."}
            ], gr.Textbox(value="")

        new_messages = loop.run_until_complete(self._process_query(message, history))
        return history + [{"role": "user", "content": message}] + new_messages, gr.Textbox(value="")

    @staticmethod
    def _raw_output_message(tool_name: str, result_content: str) -> Dict[str, Any]:
        """Chat message showing a tool's raw (non-image) output in a code fence."""
        return {
            "role": "assistant",
            "content": "```\n" + result_content + "\n```",
            "metadata": {
                "parent_id": f"result_{tool_name}",
                "id": f"raw_result_{tool_name}",
                "title": "Raw Output"
            }
        }

    async def _process_query(self, message: str, history: List[Union[Dict[str, Any], ChatMessage]]):
        """Send *message* plus prior history to Claude, run any requested MCP
        tool calls, and return the list of new chat messages to display."""
        claude_messages = []
        for msg in history:
            if isinstance(msg, ChatMessage):
                role, content = msg.role, msg.content
            else:
                role, content = msg.get("role"), msg.get("content")

            # NOTE(review): the Anthropic Messages API accepts only "user" and
            # "assistant" roles inside `messages`; a "system" entry forwarded
            # here would be rejected by the API — confirm history never
            # actually contains one, or drop "system" from this list.
            if role in ["user", "assistant", "system"]:
                claude_messages.append({"role": role, "content": content})

        claude_messages.append({"role": "user", "content": message})

        response = self.anthropic.messages.create(
            model="claude-3-5-sonnet-20241022",
            max_tokens=1000,
            messages=claude_messages,
            tools=self.tools
        )

        result_messages = []

        for content in response.content:
            if content.type == 'text':
                result_messages.append({
                    "role": "assistant",
                    "content": content.text
                })

            elif content.type == 'tool_use':
                tool_name = content.name
                tool_args = content.input

                # Keep a direct handle on this message so its status can be
                # flipped to "done" after the tool runs (the original indexed
                # result_messages[-2], which breaks if the layout changes).
                tool_call_msg = {
                    "role": "assistant",
                    "content": f"I'll use the {tool_name} tool to help answer your question.",
                    "metadata": {
                        "title": f"Using tool: {tool_name}",
                        "log": f"Parameters: {json.dumps(tool_args, ensure_ascii=True)}",
                        "status": "pending",
                        "id": f"tool_call_{tool_name}"
                    }
                }
                result_messages.append(tool_call_msg)

                result_messages.append({
                    "role": "assistant",
                    "content": "```json\n" + json.dumps(tool_args, indent=2, ensure_ascii=True) + "\n```",
                    "metadata": {
                        "parent_id": f"tool_call_{tool_name}",
                        "id": f"params_{tool_name}",
                        "title": "Tool Parameters"
                    }
                })

                result = await self.session.call_tool(tool_name, tool_args)

                tool_call_msg["metadata"]["status"] = "done"

                result_messages.append({
                    "role": "assistant",
                    "content": "Here are the results from the tool:",
                    "metadata": {
                        "title": f"Tool Result for {tool_name}",
                        "status": "done",
                        "id": f"result_{tool_name}"
                    }
                })

                result_content = result.content
                if isinstance(result_content, list):
                    result_content = "\n".join(str(item) for item in result_content)

                try:
                    result_json = json.loads(result_content)
                except (json.JSONDecodeError, TypeError, ValueError):
                    # Not JSON at all: show the payload verbatim. (Narrowed
                    # from the original bare `except:`.)
                    result_messages.append(self._raw_output_message(tool_name, result_content))
                else:
                    if (isinstance(result_json, dict)
                            and result_json.get("type") == "image"
                            and "url" in result_json):
                        result_messages.append({
                            "role": "assistant",
                            "content": {"path": result_json["url"], "alt_text": result_json.get("message", "Generated image")},
                            "metadata": {
                                "parent_id": f"result_{tool_name}",
                                "id": f"image_{tool_name}",
                                "title": "Generated Image"
                            }
                        })
                    else:
                        # Fix: the original silently dropped valid-JSON results
                        # that lacked a "type" key; show them as raw output.
                        result_messages.append(self._raw_output_message(tool_name, result_content))

                claude_messages.append({"role": "user", "content": f"Tool result for {tool_name}: {result_content}"})
                next_response = self.anthropic.messages.create(
                    model="claude-3-5-sonnet-20241022",
                    max_tokens=1000,
                    messages=claude_messages,
                )

                if next_response.content and next_response.content[0].type == 'text':
                    result_messages.append({
                        "role": "assistant",
                        "content": next_response.content[0].text
                    })

        return result_messages
189
+
190
# Single shared client instance wired into the Gradio callbacks below.
client = MCPClientWrapper()
191
+
192
def gradio_interface():
    """Build and return the Gradio Blocks UI for the MCP chat client."""
    with gr.Blocks(title="MCP Assistant Client") as app:
        gr.Markdown("# MCP Financial Assistant")
        gr.Markdown("Connect to your MCP Financial server and chat with the assistant")

        # Server-path input and connect button side by side.
        with gr.Row(equal_height=True):
            with gr.Column(scale=4):
                path_box = gr.Textbox(
                    label="Server Script Path",
                    placeholder="Enter path to server script (e.g., weather.py)",
                    value="mcp_server.py",
                )
            with gr.Column(scale=1):
                btn_connect = gr.Button("Connect")

        status_box = gr.Textbox(label="Connection Status", interactive=False)

        chat = gr.Chatbot(
            value=[],
            height=500,
            type="messages",
            show_copy_button=True,
            avatar_images=("👤", "🤖"),
        )

        with gr.Row(equal_height=True):
            question = gr.Textbox(
                label="Your Question",
                placeholder="Ask about financial assets or market trends (e.g., What's the stock price of AAPL?)",
                scale=4,
            )
            btn_clear = gr.Button("Clear Chat", scale=1)

        # Event wiring: connect to the server, submit a question, clear chat.
        btn_connect.click(client.connect, inputs=path_box, outputs=status_box)
        question.submit(client.process_message, [question, chat], [chat, question])
        btn_clear.click(lambda: [], None, chat)

    return app
230
+
231
if __name__ == "__main__":
    # Warn but keep going — the key is only needed once a message is sent.
    if not os.getenv("ANTHROPIC_API_KEY"):
        print("Warning: ANTHROPIC_API_KEY not found in environment. Please set it in your .env file.")

    gradio_interface().launch(debug=True)
mcp_server.py ADDED
@@ -0,0 +1,58 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from mcp.server.fastmcp import FastMCP
import json
import sys
import io
import time  # NOTE(review): unused in this module — candidate for removal
from gradio_client import Client

# Re-wrap the raw stdio streams so the MCP stdio transport always speaks
# UTF-8 regardless of the host locale; undecodable bytes are replaced
# rather than raising.
sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='replace')

# Server instance; this name is what connecting MCP clients see.
mcp = FastMCP("huggingface_spaces_image_display")
12
+
13
@mcp.tool()
async def generate_image(prompt: str, width: int = 512, height: int = 512) -> str:
    """Generate an image using SanaSprint model.

    Args:
        prompt: Text prompt describing the image to generate
        width: Image width (default: 512)
        height: Image height (default: 512)

    Returns:
        A JSON string: {"type": "image", "url": ..., "message": ...} on
        success, or {"type": "error", "message": ...} on any failure —
        this tool never raises to the caller.
    """
    try:
        # Fix: Client construction can itself fail (network/DNS/handshake),
        # so it now lives inside the try block and surfaces as the same
        # JSON error contract instead of raising out of the tool.
        client = Client("https://ysharma-sanasprint.hf.space/")

        # NOTE(review): positional args follow the Space's /infer endpoint
        # signature — confirm against the SanaSprint Space API before
        # changing their order or values.
        result = client.predict(
            prompt,
            "0.6B",
            0,
            True,
            width,
            height,
            4.0,
            2,
            api_name="/infer"
        )

        # Expected shape: a list whose first element is a dict with a "url".
        if isinstance(result, list) and len(result) >= 1:
            image_data = result[0]
            if isinstance(image_data, dict) and "url" in image_data:
                return json.dumps({
                    "type": "image",
                    "url": image_data["url"],
                    "message": f"Generated image for prompt: {prompt}"
                })

        # Unexpected response shape: report a generic failure.
        return json.dumps({
            "type": "error",
            "message": "Failed to generate image"
        })

    except Exception as e:
        # Best-effort tool: fold every failure into the JSON error contract.
        return json.dumps({
            "type": "error",
            "message": f"Error generating image: {str(e)}"
        })
56
+
57
# Run over stdio so this server can be launched as a subprocess by the
# MCP client in app.py.
if __name__ == "__main__":
    mcp.run(transport='stdio')
requirements.txt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ gradio
2
+ anthropic
3
+ mcp