"""
Universal MCP Client - Main Application
A modular Gradio chatbot that uses HuggingFace Inference Providers to access OpenAI GPT OSS models
and can connect to various MCP servers for enhanced functionality.
"""
import logging
import os
from config import AppConfig
from mcp_client import UniversalMCPClient
from ui_components import UIComponents
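# AppConfig (imported from config.py, not shown here) is assumed to expose at least:
#   HF_TOKEN   - Hugging Face access token, typically read from the HF_TOKEN env var
#   DEBUG_MODE - bool flag forwarded to demo.launch(debug=...) in main() below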
# Set up logging (basicConfig is a no-op if logging was already configured elsewhere)
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

def main():
    """Main application entry point"""
    logger.info("🚀 Starting Universal MCP Client with HuggingFace Inference Providers...")

    try:
        # Check for HuggingFace token
        if not AppConfig.HF_TOKEN:
            logger.warning("⚠️ No HF_TOKEN found in environment variables")
            logger.info("💡 Users will need to login manually or set HF_TOKEN")
        else:
            logger.info("✅ HF_TOKEN found in environment")

        # Initialize the MCP client
        mcp_client = UniversalMCPClient()

        # Create UI components
        ui_components = UIComponents(mcp_client)

        # Create the Gradio interface
        demo = ui_components.create_interface()

        logger.info("✅ Universal MCP Client started successfully!")

        # Launch the application (blocks until the server is stopped)
        demo.launch(
            debug=AppConfig.DEBUG_MODE,
            share=False,            # Set to True if you want to create a public link
            server_name="0.0.0.0",  # Allow external connections
            server_port=7860,       # Default Gradio port
            auth=None,              # No authentication (handled by HF login)
            max_threads=40          # Allow multiple concurrent users
        )
    except Exception as e:
        logger.error(f"❌ Failed to start application: {e}")
        raise


if __name__ == "__main__":
    main()
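# Example usage (a sketch; assumes AppConfig picks up HF_TOKEN from the environment):
#   $ HF_TOKEN=<your-token> python app.py
#   Then open http://localhost:7860 in a browser.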