# inference-providers-mcp / pyproject.toml
# Provenance (from Hugging Face Space page): author burtenshaw,
# commit 551ae1a — "switch back to gradio" (318 bytes).
# PEP 621 project metadata for the MCP server.
[project]
name = "inference-providers-mcp"
version = "0.1.0"
description = "MCP Server for Hugging Face Inference Providers Chat Completion"
readme = "README.md"
# 3.11+ required (e.g. for modern stdlib features used by the server stack).
requires-python = ">=3.11"
# Runtime dependencies as PEP 508 specifiers, one per line, sorted.
dependencies = [
    # [mcp] extra turns the Gradio app into an MCP server endpoint.
    "gradio[mcp]>=5.34.0",
    "huggingface_hub>=0.20.0",
    "python-dotenv>=1.0.0",
    "requests>=2.31.0",
]