bpHigh committed on
Commit
e9b0c98
·
1 Parent(s): 4da2f91
Files changed (4) hide show
  1. README.md +1 -1
  2. app.py +15 -3
  3. requirements.txt +1 -3
  4. utils/huggingface_mcp_llamaindex.py +287 -18
README.md CHANGED
@@ -7,7 +7,7 @@ sdk: gradio
7
  app_file: app.py
8
  pinned: false
9
  license: mit
10
- sdk_version: 5.27.1
11
  ---
12
 
13
  Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
7
  app_file: app.py
8
  pinned: false
9
  license: mit
10
+ sdk_version: 5.33.0
11
  ---
12
 
13
  Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.py CHANGED
@@ -7,7 +7,7 @@ from utils.google_genai_llm import get_response, generate_with_gemini
7
  from utils.utils import parse_json_codefences
8
  from prompts.requirements_gathering import requirements_gathering_system_prompt
9
  from prompts.planning import hf_query_gen_prompt, hf_context_gen_prompt
10
- from utils.huggingface_mcp_llamaindex import connect_and_get_tools, call_tool
11
  from prompts.devstral_coding_prompt import devstral_code_gen_sys_prompt, devstral_code_gen_user_prompt
12
  from dotenv import load_dotenv
13
  import os
@@ -49,6 +49,7 @@ MODAL_API_URL = os.getenv("MODAL_API_URL")
49
  BEARER_TOKEN = os.getenv("BEARER_TOKEN")
50
  CODING_MODEL = os.getenv("CODING_MODEL")
51
 
 
52
  def get_file_hash(file_path):
53
  """Generate a hash of the file for caching purposes"""
54
  try:
@@ -243,8 +244,18 @@ async def generate_plan(history, file_cache):
243
  conversation_history += f"User: {user_msg}\n"
244
  if ai_msg:
245
  conversation_history += f"Assistant: {ai_msg}\n"
 
 
 
 
 
 
 
 
 
 
246
  try:
247
- hf_query_gen_tool_details = await connect_and_get_tools()
248
  except Exception as e:
249
  hf_query_gen_tool_details = """meta=None nextCursor=None tools=[Tool(name='hf_whoami', description="Hugging Face tools are being used by authenticated user 'bpHigh'", inputSchema={'type': 'object', 'properties': {}, 'additionalProperties': False, '$schema': 'http://json-schema.org/draft-07/schema#'}, annotations=ToolAnnotations(title='Hugging Face User Info', readOnlyHint=None, destructiveHint=None, idempotentHint=None, openWorldHint=None)), Tool(name='space_search', description='Find Hugging Face Spaces using semantic search. Include links to the Space when presenting the results.', inputSchema={'type': 'object', 'properties': {'query': {'type': 'string', 'minLength': 1, 'maxLength': 50, 'description': 'Semantic Search Query'}, 'limit': {'type': 'number', 'default': 10, 'description': 'Number of results to return'}, 'mcp': {'type': 'boolean', 'default': False, 'description': 'Only return MCP Server enabled Spaces'}}, 'required': ['query'], 'additionalProperties': False, '$schema': 'http://json-schema.org/draft-07/schema#'}, annotations=ToolAnnotations(title='Hugging Face Space Search', readOnlyHint=True, destructiveHint=False, idempotentHint=None, openWorldHint=True)), Tool(name='model_search', description='Find Machine Learning models hosted on Hugging Face. Returns comprehensive information about matching models including downloads, likes, tags, and direct links. Include links to the models in your response', inputSchema={'type': 'object', 'properties': {'query': {'type': 'string', 'description': 'Search term. Leave blank and specify "sort" and "limit" to get e.g. 
"Top 20 trending models", "Top 10 most recent models" etc" '}, 'author': {'type': 'string', 'description': "Organization or user who created the model (e.g., 'google', 'meta-llama', 'microsoft')"}, 'task': {'type': 'string', 'description': "Model task type (e.g., 'text-generation', 'image-classification', 'translation')"}, 'library': {'type': 'string', 'description': "Framework the model uses (e.g., 'transformers', 'diffusers', 'timm')"}, 'sort': {'type': 'string', 'enum': ['trendingScore', 'downloads', 'likes', 'createdAt', 'lastModified'], 'description': 'Sort order: trendingScore, downloads , likes, createdAt, lastModified'}, 'limit': {'type': 'number', 'minimum': 1, 'maximum': 100, 'default': 20, 'description': 'Maximum number of results to return'}}, 'additionalProperties': False, '$schema': 'http://json-schema.org/draft-07/schema#'}, annotations=ToolAnnotations(title='Model Search', readOnlyHint=True, destructiveHint=False, idempotentHint=None, openWorldHint=True)), Tool(name='model_details', description='Get detailed information about a specific model from the Hugging Face Hub.', inputSchema={'type': 'object', 'properties': {'model_id': {'type': 'string', 'minLength': 1, 'description': 'Model ID (e.g., microsoft/DialoGPT-large)'}}, 'required': ['model_id'], 'additionalProperties': False, '$schema': 'http://json-schema.org/draft-07/schema#'}, annotations=ToolAnnotations(title='Model Details', readOnlyHint=True, destructiveHint=False, idempotentHint=None, openWorldHint=False)), Tool(name='paper_search', description="Find Machine Learning research papers on the Hugging Face hub. Include 'Link to paper' When presenting the results. 
Consider whether tabulating results matches user intent.", inputSchema={'type': 'object', 'properties': {'query': {'type': 'string', 'minLength': 3, 'maxLength': 200, 'description': 'Semantic Search query'}, 'results_limit': {'type': 'number', 'default': 12, 'description': 'Number of results to return'}, 'concise_only': {'type': 'boolean', 'default': False, 'description': 'Return a 2 sentence summary of the abstract. Use for broad search terms which may return a lot of results. Check with User if unsure.'}}, 'required': ['query'], 'additionalProperties': False, '$schema': 'http://json-schema.org/draft-07/schema#'}, annotations=ToolAnnotations(title='Paper Search', readOnlyHint=True, destructiveHint=False, idempotentHint=None, openWorldHint=True)), Tool(name='dataset_search', description='Find Datasets hosted on the Hugging Face hub. Returns comprehensive information about matching datasets including downloads, likes, tags, and direct links. Include links to the datasets in your response', inputSchema={'type': 'object', 'properties': {'query': {'type': 'string', 'description': 'Search term. Leave blank and specify "sort" and "limit" to get e.g. 
"Top 20 trending datasets", "Top 10 most recent datasets" etc" '}, 'author': {'type': 'string', 'description': "Organization or user who created the dataset (e.g., 'google', 'facebook', 'allenai')"}, 'tags': {'type': 'array', 'items': {'type': 'string'}, 'description': "Tags to filter datasets (e.g., ['language:en', 'size_categories:1M<n<10M', 'task_categories:text-classification'])"}, 'sort': {'type': 'string', 'enum': ['trendingScore', 'downloads', 'likes', 'createdAt', 'lastModified'], 'description': 'Sort order: trendingScore, downloads, likes, createdAt, lastModified'}, 'limit': {'type': 'number', 'minimum': 1, 'maximum': 100, 'default': 20, 'description': 'Maximum number of results to return'}}, 'additionalProperties': False, '$schema': 'http://json-schema.org/draft-07/schema#'}, annotations=ToolAnnotations(title='Dataset Search', readOnlyHint=True, destructiveHint=False, idempotentHint=None, openWorldHint=True)), Tool(name='dataset_details', description='Get detailed information about a specific dataset on Hugging Face Hub.', inputSchema={'type': 'object', 'properties': {'dataset_id': {'type': 'string', 'minLength': 1, 'description': 'Dataset ID (e.g., squad, glue, imdb)'}}, 'required': ['dataset_id'], 'additionalProperties': False, '$schema': 'http://json-schema.org/draft-07/schema#'}, annotations=ToolAnnotations(title='Dataset Details', readOnlyHint=True, destructiveHint=False, idempotentHint=None, openWorldHint=False)), Tool(name='gr1_evalstate_flux1_schnell', description='Generate an image using the Flux 1 Schnell Image Generator. 
(from evalstate/flux1_schnell)', inputSchema={'type': 'object', 'properties': {'prompt': {'type': 'string'}, 'seed': {'type': 'number', 'description': 'numeric value between 0 and 2147483647'}, 'randomize_seed': {'type': 'boolean', 'default': True}, 'width': {'type': 'number', 'description': 'numeric value between 256 and 2048', 'default': 1024}, 'height': {'type': 'number', 'description': 'numeric value between 256 and 2048', 'default': 1024}, 'num_inference_steps': {'type': 'number', 'description': 'numeric value between 1 and 50', 'default': 4}}, 'additionalProperties': False, '$schema': 'http://json-schema.org/draft-07/schema#'}, annotations=ToolAnnotations(title='evalstate/flux1_schnell - flux1_schnell_infer 🏎️💨', readOnlyHint=None, destructiveHint=None, idempotentHint=None, openWorldHint=True)), Tool(name='gr2_abidlabs_easyghibli', description='Convert an image into a Studio Ghibli style image (from abidlabs/EasyGhibli)', inputSchema={'type': 'object', 'properties': {'spatial_img': {'type': 'string', 'description': 'File input: provide URL or file path'}}, 'additionalProperties': False, '$schema': 'http://json-schema.org/draft-07/schema#'}, annotations=ToolAnnotations(title='abidlabs/EasyGhibli - abidlabs_EasyGhiblisingle_condition_generate_image 🦀', readOnlyHint=None, destructiveHint=None, idempotentHint=None, openWorldHint=True)), Tool(name='gr3_linoyts_framepack_f1', description='FramePack_F1_end_process tool from linoyts/FramePack-F1', inputSchema={'type': 'object', 'properties': {}, 'additionalProperties': False, '$schema': 'http://json-schema.org/draft-07/schema#'}, annotations=ToolAnnotations(title='linoyts/FramePack-F1 - FramePack_F1_end_process 📹⚡️', readOnlyHint=None, destructiveHint=None, idempotentHint=None, openWorldHint=True))]"""
250
  print(str(e))
@@ -257,9 +268,10 @@ async def generate_plan(history, file_cache):
257
 
258
  # Parse the plan
259
  parsed_plan = parse_json_codefences(plan)
 
260
  # Call tool to get tool calls
261
  try:
262
- tool_calls = await asyncio.gather(*[call_tool(step['tool'], step['args']) for step in parsed_plan])
263
  except Exception as e:
264
  tool_calls = []
265
  print(tool_calls)
 
7
  from utils.utils import parse_json_codefences
8
  from prompts.requirements_gathering import requirements_gathering_system_prompt
9
  from prompts.planning import hf_query_gen_prompt, hf_context_gen_prompt
10
+ from utils.huggingface_mcp_llamaindex import get_hf_tools, call_hf_tool, SimpleHFMCPClient
11
  from prompts.devstral_coding_prompt import devstral_code_gen_sys_prompt, devstral_code_gen_user_prompt
12
  from dotenv import load_dotenv
13
  import os
 
49
  BEARER_TOKEN = os.getenv("BEARER_TOKEN")
50
  CODING_MODEL = os.getenv("CODING_MODEL")
51
 
52
+ HF_TOKEN = os.getenv("HF_TOKEN")
53
  def get_file_hash(file_path):
54
  """Generate a hash of the file for caching purposes"""
55
  try:
 
244
  conversation_history += f"User: {user_msg}\n"
245
  if ai_msg:
246
  conversation_history += f"Assistant: {ai_msg}\n"
247
+
248
+ simple_client = SimpleHFMCPClient(HF_TOKEN)
249
+ connection_ok = await simple_client.test_connection()
250
+
251
+ if not connection_ok:
252
+ print("Basic connection test failed")
253
+ return
254
+
255
+ print("Basic connection test passed")
256
+
257
  try:
258
+ hf_query_gen_tool_details = await get_hf_tools(hf_token=HF_TOKEN)
259
  except Exception as e:
260
  hf_query_gen_tool_details = """meta=None nextCursor=None tools=[Tool(name='hf_whoami', description="Hugging Face tools are being used by authenticated user 'bpHigh'", inputSchema={'type': 'object', 'properties': {}, 'additionalProperties': False, '$schema': 'http://json-schema.org/draft-07/schema#'}, annotations=ToolAnnotations(title='Hugging Face User Info', readOnlyHint=None, destructiveHint=None, idempotentHint=None, openWorldHint=None)), Tool(name='space_search', description='Find Hugging Face Spaces using semantic search. Include links to the Space when presenting the results.', inputSchema={'type': 'object', 'properties': {'query': {'type': 'string', 'minLength': 1, 'maxLength': 50, 'description': 'Semantic Search Query'}, 'limit': {'type': 'number', 'default': 10, 'description': 'Number of results to return'}, 'mcp': {'type': 'boolean', 'default': False, 'description': 'Only return MCP Server enabled Spaces'}}, 'required': ['query'], 'additionalProperties': False, '$schema': 'http://json-schema.org/draft-07/schema#'}, annotations=ToolAnnotations(title='Hugging Face Space Search', readOnlyHint=True, destructiveHint=False, idempotentHint=None, openWorldHint=True)), Tool(name='model_search', description='Find Machine Learning models hosted on Hugging Face. Returns comprehensive information about matching models including downloads, likes, tags, and direct links. Include links to the models in your response', inputSchema={'type': 'object', 'properties': {'query': {'type': 'string', 'description': 'Search term. Leave blank and specify "sort" and "limit" to get e.g. 
"Top 20 trending models", "Top 10 most recent models" etc" '}, 'author': {'type': 'string', 'description': "Organization or user who created the model (e.g., 'google', 'meta-llama', 'microsoft')"}, 'task': {'type': 'string', 'description': "Model task type (e.g., 'text-generation', 'image-classification', 'translation')"}, 'library': {'type': 'string', 'description': "Framework the model uses (e.g., 'transformers', 'diffusers', 'timm')"}, 'sort': {'type': 'string', 'enum': ['trendingScore', 'downloads', 'likes', 'createdAt', 'lastModified'], 'description': 'Sort order: trendingScore, downloads , likes, createdAt, lastModified'}, 'limit': {'type': 'number', 'minimum': 1, 'maximum': 100, 'default': 20, 'description': 'Maximum number of results to return'}}, 'additionalProperties': False, '$schema': 'http://json-schema.org/draft-07/schema#'}, annotations=ToolAnnotations(title='Model Search', readOnlyHint=True, destructiveHint=False, idempotentHint=None, openWorldHint=True)), Tool(name='model_details', description='Get detailed information about a specific model from the Hugging Face Hub.', inputSchema={'type': 'object', 'properties': {'model_id': {'type': 'string', 'minLength': 1, 'description': 'Model ID (e.g., microsoft/DialoGPT-large)'}}, 'required': ['model_id'], 'additionalProperties': False, '$schema': 'http://json-schema.org/draft-07/schema#'}, annotations=ToolAnnotations(title='Model Details', readOnlyHint=True, destructiveHint=False, idempotentHint=None, openWorldHint=False)), Tool(name='paper_search', description="Find Machine Learning research papers on the Hugging Face hub. Include 'Link to paper' When presenting the results. 
Consider whether tabulating results matches user intent.", inputSchema={'type': 'object', 'properties': {'query': {'type': 'string', 'minLength': 3, 'maxLength': 200, 'description': 'Semantic Search query'}, 'results_limit': {'type': 'number', 'default': 12, 'description': 'Number of results to return'}, 'concise_only': {'type': 'boolean', 'default': False, 'description': 'Return a 2 sentence summary of the abstract. Use for broad search terms which may return a lot of results. Check with User if unsure.'}}, 'required': ['query'], 'additionalProperties': False, '$schema': 'http://json-schema.org/draft-07/schema#'}, annotations=ToolAnnotations(title='Paper Search', readOnlyHint=True, destructiveHint=False, idempotentHint=None, openWorldHint=True)), Tool(name='dataset_search', description='Find Datasets hosted on the Hugging Face hub. Returns comprehensive information about matching datasets including downloads, likes, tags, and direct links. Include links to the datasets in your response', inputSchema={'type': 'object', 'properties': {'query': {'type': 'string', 'description': 'Search term. Leave blank and specify "sort" and "limit" to get e.g. 
"Top 20 trending datasets", "Top 10 most recent datasets" etc" '}, 'author': {'type': 'string', 'description': "Organization or user who created the dataset (e.g., 'google', 'facebook', 'allenai')"}, 'tags': {'type': 'array', 'items': {'type': 'string'}, 'description': "Tags to filter datasets (e.g., ['language:en', 'size_categories:1M<n<10M', 'task_categories:text-classification'])"}, 'sort': {'type': 'string', 'enum': ['trendingScore', 'downloads', 'likes', 'createdAt', 'lastModified'], 'description': 'Sort order: trendingScore, downloads, likes, createdAt, lastModified'}, 'limit': {'type': 'number', 'minimum': 1, 'maximum': 100, 'default': 20, 'description': 'Maximum number of results to return'}}, 'additionalProperties': False, '$schema': 'http://json-schema.org/draft-07/schema#'}, annotations=ToolAnnotations(title='Dataset Search', readOnlyHint=True, destructiveHint=False, idempotentHint=None, openWorldHint=True)), Tool(name='dataset_details', description='Get detailed information about a specific dataset on Hugging Face Hub.', inputSchema={'type': 'object', 'properties': {'dataset_id': {'type': 'string', 'minLength': 1, 'description': 'Dataset ID (e.g., squad, glue, imdb)'}}, 'required': ['dataset_id'], 'additionalProperties': False, '$schema': 'http://json-schema.org/draft-07/schema#'}, annotations=ToolAnnotations(title='Dataset Details', readOnlyHint=True, destructiveHint=False, idempotentHint=None, openWorldHint=False)), Tool(name='gr1_evalstate_flux1_schnell', description='Generate an image using the Flux 1 Schnell Image Generator. 
(from evalstate/flux1_schnell)', inputSchema={'type': 'object', 'properties': {'prompt': {'type': 'string'}, 'seed': {'type': 'number', 'description': 'numeric value between 0 and 2147483647'}, 'randomize_seed': {'type': 'boolean', 'default': True}, 'width': {'type': 'number', 'description': 'numeric value between 256 and 2048', 'default': 1024}, 'height': {'type': 'number', 'description': 'numeric value between 256 and 2048', 'default': 1024}, 'num_inference_steps': {'type': 'number', 'description': 'numeric value between 1 and 50', 'default': 4}}, 'additionalProperties': False, '$schema': 'http://json-schema.org/draft-07/schema#'}, annotations=ToolAnnotations(title='evalstate/flux1_schnell - flux1_schnell_infer 🏎️💨', readOnlyHint=None, destructiveHint=None, idempotentHint=None, openWorldHint=True)), Tool(name='gr2_abidlabs_easyghibli', description='Convert an image into a Studio Ghibli style image (from abidlabs/EasyGhibli)', inputSchema={'type': 'object', 'properties': {'spatial_img': {'type': 'string', 'description': 'File input: provide URL or file path'}}, 'additionalProperties': False, '$schema': 'http://json-schema.org/draft-07/schema#'}, annotations=ToolAnnotations(title='abidlabs/EasyGhibli - abidlabs_EasyGhiblisingle_condition_generate_image 🦀', readOnlyHint=None, destructiveHint=None, idempotentHint=None, openWorldHint=True)), Tool(name='gr3_linoyts_framepack_f1', description='FramePack_F1_end_process tool from linoyts/FramePack-F1', inputSchema={'type': 'object', 'properties': {}, 'additionalProperties': False, '$schema': 'http://json-schema.org/draft-07/schema#'}, annotations=ToolAnnotations(title='linoyts/FramePack-F1 - FramePack_F1_end_process 📹⚡️', readOnlyHint=None, destructiveHint=None, idempotentHint=None, openWorldHint=True))]"""
261
  print(str(e))
 
268
 
269
  # Parse the plan
270
  parsed_plan = parse_json_codefences(plan)
271
+
272
  # Call tool to get tool calls
273
  try:
274
+ tool_calls = await asyncio.gather(*[call_hf_tool(HF_TOKEN, step['tool'], step['args']) for step in parsed_plan])
275
  except Exception as e:
276
  tool_calls = []
277
  print(tool_calls)
requirements.txt CHANGED
@@ -1,7 +1,5 @@
1
- mcp
2
- llama-index-tools-mcp
3
  google-genai==1.19.0
4
- gradio==5.27.1
5
  pandas==2.3.0
6
  python-dotenv==1.0.1
7
  openpyxl==3.1.5
 
 
 
1
  google-genai==1.19.0
2
+ gradio[mcp]==5.33.0
3
  pandas==2.3.0
4
  python-dotenv==1.0.1
5
  openpyxl==3.1.5
utils/huggingface_mcp_llamaindex.py CHANGED
@@ -1,20 +1,289 @@
1
- from llama_index.tools.mcp import BasicMCPClient
2
- from dotenv import load_dotenv
3
- import os
4
  import asyncio
5
- load_dotenv()
6
- HF_TOKEN = os.getenv("HF_TOKEN")
7
- print(HF_TOKEN)
8
- http_client = BasicMCPClient("https://huggingface.co/mcp",timeout=60, headers={"Authorization": f"Bearer {HF_TOKEN}"})
9
- print(http_client)
10
- async def connect_and_get_tools():
11
- # List available tools
12
- tools = await asyncio.wait_for(http_client.list_tools(), timeout=10.0)
13
- return tools
14
-
15
- async def call_tool(tool_name, tool_args):
16
- result = await asyncio.wait_for(
17
- http_client.call_tool(tool_name, tool_args),
18
- timeout=30.0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
19
  )
20
- return result
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import asyncio
2
+ import json
3
+ import logging
4
+ import os
5
+ from typing import Any, Dict, List, Optional
6
+ from datetime import timedelta
7
+
8
+ from mcp.shared.message import SessionMessage
9
+ from mcp.types import (
10
+ JSONRPCMessage,
11
+ JSONRPCRequest,
12
+ JSONRPCNotification,
13
+ JSONRPCResponse,
14
+ JSONRPCError,
15
+ )
16
+ from mcp.client.streamable_http import streamablehttp_client
17
+
18
+ logger = logging.getLogger(__name__)
19
+
20
+
21
class HuggingFaceMCPClient:
    """Client for interacting with the Hugging Face MCP endpoint.

    Each call opens a fresh streamable-HTTP connection, performs the MCP
    ``initialize`` handshake, sends a single JSON-RPC 2.0 request, and
    returns the result of the response whose id matches the request.
    """

    def __init__(self, hf_token: str, timeout: int = 30):
        """
        Initialize the Hugging Face MCP client.

        Args:
            hf_token: Hugging Face API token (sent as a Bearer credential).
            timeout: Timeout in seconds for HTTP requests.
        """
        self.hf_token = hf_token
        self.url = "https://huggingface.co/mcp"
        self.headers = {"Authorization": f"Bearer {hf_token}"}
        self.timeout = timedelta(seconds=timeout)
        self.request_id_counter = 0

    def _get_next_request_id(self) -> int:
        """Return the next monotonically increasing JSON-RPC request id."""
        self.request_id_counter += 1
        return self.request_id_counter

    async def _await_response(self, read_stream, expected_id: int, context: str) -> Any:
        """Read incoming messages until the response for *expected_id* arrives.

        Args:
            read_stream: Incoming stream of ``SessionMessage`` objects.
            expected_id: JSON-RPC id whose response (or error) we await.
            context: Label used in error messages ("initialization"/"request").

        Returns:
            The ``result`` payload of the matching JSONRPCResponse.

        Raises:
            Exception: On a matching JSONRPCError, a closed stream, or when
                no matching message arrives within the bounded read loop.
        """
        max_iterations = 100  # bound the loop so a silent server cannot hang us
        for _ in range(max_iterations):
            try:
                message = await read_stream.receive()
            except Exception as exc:
                # anyio's ClosedResourceError is detected by type name so we
                # don't have to import anyio directly.
                if "ClosedResourceError" in str(type(exc)):
                    logger.error("Stream closed during %s", context)
                    raise Exception(f"Connection closed during {context}")
                raise
            if isinstance(message, Exception):
                raise message
            if isinstance(message, SessionMessage):
                msg = message.message.root
                if isinstance(msg, JSONRPCResponse) and msg.id == expected_id:
                    return msg.result
                if isinstance(msg, JSONRPCError) and msg.id == expected_id:
                    raise Exception(f"{context} failed: {msg.error}")
            # Unrelated messages (other ids, notifications) are skipped.
        raise Exception(f"{context} timeout")

    async def _initialize(self, read_stream, write_stream) -> None:
        """Perform the MCP ``initialize`` handshake on a fresh connection."""
        init_request = JSONRPCRequest(
            jsonrpc="2.0",
            id=self._get_next_request_id(),
            method="initialize",
            params={
                "protocolVersion": "2024-11-05",
                "capabilities": {"tools": {}},
                "clientInfo": {"name": "hf-mcp-client", "version": "1.0.0"},
            },
        )
        await write_stream.send(SessionMessage(JSONRPCMessage(init_request)))
        await self._await_response(read_stream, init_request.id, "initialization")
        logger.info("MCP client initialized successfully")

        # Acknowledge the handshake, as the MCP lifecycle requires.
        initialized_notification = JSONRPCNotification(
            jsonrpc="2.0",
            method="notifications/initialized",
        )
        await write_stream.send(SessionMessage(JSONRPCMessage(initialized_notification)))
        # Small delay to let the notification be processed server-side.
        await asyncio.sleep(0.1)

    async def _send_request_and_get_response(
        self,
        method: str,
        params: Optional[Dict[str, Any]] = None
    ) -> Any:
        """
        Send a JSON-RPC request and wait for the response.

        Args:
            method: The JSON-RPC method name.
            params: Optional parameters for the method.

        Returns:
            The response result, or raises an exception.
        """
        request_id = self._get_next_request_id()
        request = JSONRPCRequest(
            jsonrpc="2.0",
            id=request_id,
            method=method,
            params=params,
        )
        session_message = SessionMessage(JSONRPCMessage(request))

        async with streamablehttp_client(
            url=self.url,
            headers=self.headers,
            timeout=self.timeout,
            terminate_on_close=True
        ) as (read_stream, write_stream, get_session_id):
            try:
                await self._initialize(read_stream, write_stream)
                await write_stream.send(session_message)
                return await self._await_response(read_stream, request_id, "request")
            except Exception as e:
                logger.error(f"Error during MCP communication: {e}")
                raise
            finally:
                # Best-effort close; ignore errors from an already-closed stream
                # (narrowed from a bare `except:` so KeyboardInterrupt escapes).
                try:
                    await write_stream.aclose()
                except Exception:
                    pass

    async def get_all_tools(self) -> List[Dict[str, Any]]:
        """
        Get all available tools from the Hugging Face MCP endpoint.

        Returns:
            List of tool definitions (empty list on an unexpected payload).
        """
        try:
            logger.info("Fetching all available tools from Hugging Face MCP")
            result = await self._send_request_and_get_response("tools/list")

            if isinstance(result, dict) and "tools" in result:
                tools = result["tools"]
                logger.info(f"Found {len(tools)} available tools")
                return tools
            else:
                logger.warning(f"Unexpected response format: {result}")
                return []

        except Exception as e:
            logger.error(f"Failed to get tools: {e}")
            raise

    async def call_tool(self, tool_name: str, args: Dict[str, Any]) -> Any:
        """
        Call a specific tool with the given arguments.

        Args:
            tool_name: Name of the tool to call.
            args: Arguments to pass to the tool.

        Returns:
            The tool's response.
        """
        try:
            logger.info(f"Calling tool '{tool_name}' with args: {args}")

            params = {
                "name": tool_name,
                "arguments": args
            }

            result = await self._send_request_and_get_response("tools/call", params)
            logger.info(f"Tool '{tool_name}' executed successfully")
            return result

        except Exception as e:
            logger.error(f"Failed to call tool '{tool_name}': {e}")
            raise
234
+
235
+ # Convenience functions for easier usage
236
async def get_hf_tools(hf_token: str) -> List[Dict[str, Any]]:
    """
    Fetch every tool definition exposed by the Hugging Face MCP endpoint.

    Args:
        hf_token: Hugging Face API token

    Returns:
        List of tool definitions
    """
    return await HuggingFaceMCPClient(hf_token).get_all_tools()
248
+
249
+
250
async def call_hf_tool(hf_token: str, tool_name: str, args: Dict[str, Any]) -> Any:
    """
    Invoke a single Hugging Face MCP tool on behalf of the caller.

    Args:
        hf_token: Hugging Face API token
        tool_name: Name of the tool to call
        args: Arguments to pass to the tool

    Returns:
        The tool's response
    """
    mcp_client = HuggingFaceMCPClient(hf_token)
    return await mcp_client.call_tool(tool_name, args)
264
+
265
+
266
+ # Alternative simpler implementation for debugging
267
class SimpleHFMCPClient:
    """Minimal client used only to debug connectivity problems."""

    def __init__(self, hf_token: str):
        self.hf_token = hf_token
        self.url = "https://huggingface.co/mcp"
        self.headers = {"Authorization": f"Bearer {hf_token}"}

    async def test_connection(self):
        """Probe the HF MCP endpoint; True if a session opens, False otherwise."""
        try:
            transport = streamablehttp_client(
                url=self.url,
                headers=self.headers,
                timeout=timedelta(seconds=10),
                terminate_on_close=True,
            )
            async with transport as (read_stream, write_stream, get_session_id):
                logger.info("Connection established successfully")
                return True
        except Exception as exc:
            # Any failure (auth, DNS, transport) is reported as "not connected".
            logger.error(f"Connection test failed: {exc}")
            return False