Spaces:
Running
Running
File size: 1,812 Bytes
a27d846 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 |
"""
Universal MCP Client - Main Application
A modular Gradio chatbot that uses HuggingFace Inference Providers to access OpenAI GPT OSS models
and can connect to various MCP servers for enhanced functionality.
"""
import logging
import os
from config import AppConfig
from mcp_client import UniversalMCPClient
from ui_components import UIComponents
# Module-level logger named after this module (standard `logging` convention);
# handlers/levels are expected to be configured by the hosting environment.
logger = logging.getLogger(__name__)
def main():
    """Main application entry point.

    Initializes the MCP client, builds the Gradio UI, and launches the web
    server. `demo.launch()` blocks until the server is shut down.

    Raises:
        Exception: re-raised after logging if startup fails at any step.
    """
    logger.info("🚀 Starting Universal MCP Client with HuggingFace Inference Providers...")
    try:
        # Check for HuggingFace token; the app can still start without one,
        # users just have to authenticate through the UI instead.
        if not AppConfig.HF_TOKEN:
            logger.warning("⚠️ No HF_TOKEN found in environment variables")
            logger.info("💡 Users will need to login manually or set HF_TOKEN")
        else:
            logger.info("✅ HF_TOKEN found in environment")

        # Initialize the MCP client
        mcp_client = UniversalMCPClient()

        # Create UI components
        ui_components = UIComponents(mcp_client)

        # Create the Gradio interface
        demo = ui_components.create_interface()

        # Log BEFORE launching: demo.launch() blocks until the server stops,
        # so a success message placed after it would never appear during
        # normal operation (original code had it after the launch call).
        logger.info("✅ Universal MCP Client started successfully!")

        # Launch the application (blocking call)
        demo.launch(
            debug=AppConfig.DEBUG_MODE,
            share=False,  # Set to True if you want to create a public link
            server_name="0.0.0.0",  # Allow external connections
            server_port=7860,  # Default Gradio port
            auth=None,  # No authentication (handled by HF login)
            max_threads=40,  # Allow multiple concurrent users
        )
    except Exception as e:
        # Top-level boundary: log with context, then re-raise so the process
        # exits non-zero and the failure is visible to the supervisor.
        logger.error(f"❌ Failed to start application: {e}")
        raise
# Script entry point: run the app only when executed directly,
# not when this module is imported.
if __name__ == "__main__":
    main()
|