|
""" |
|
Universal MCP Client - Main Application |
|
A modular Gradio chatbot that uses HuggingFace Inference Providers to access OpenAI GPT OSS models |
|
and can connect to various MCP servers for enhanced functionality. |
|
""" |
|
import logging |
|
import os |
|
|
|
from config import AppConfig |
|
from mcp_client import UniversalMCPClient |
|
from ui_components import UIComponents |
|
|
|
|
|
logger = logging.getLogger(__name__) |
|
|
|
def main():
    """Main application entry point.

    Validates the HF token, constructs the MCP client and Gradio UI,
    then launches the web server. ``demo.launch`` blocks until the
    server shuts down.

    Raises:
        Exception: re-raised (after logging) if any startup step fails,
            so the process exits non-zero instead of hanging silently.
    """
    logger.info("🚀 Starting Universal MCP Client with HuggingFace Inference Providers...")

    try:
        # An HF token is optional at startup; without one, users must log in
        # through the UI or export HF_TOKEN before launching.
        if not AppConfig.HF_TOKEN:
            logger.warning("⚠️ No HF_TOKEN found in environment variables")
            logger.info("💡 Users will need to login manually or set HF_TOKEN")
        else:
            logger.info("✅ HF_TOKEN found in environment")

        # Client that brokers requests to the configured MCP servers.
        mcp_client = UniversalMCPClient()

        # Build the Gradio interface around the client.
        ui_components = UIComponents(mcp_client)
        demo = ui_components.create_interface()

        # Log success BEFORE launching: launch() blocks until shutdown,
        # so a message placed after it would never appear during startup.
        logger.info("✅ Universal MCP Client started successfully!")

        demo.launch(
            debug=AppConfig.DEBUG_MODE,
            share=False,            # no public Gradio share tunnel
            server_name="0.0.0.0",  # listen on all interfaces (container/Spaces friendly)
            server_port=7860,       # Gradio / HF Spaces default port
            auth=None,              # no HTTP basic auth; login handled in-app
            max_threads=40,         # worker threads for concurrent requests
        )

    except Exception as e:
        logger.error(f"❌ Failed to start application: {e}")
        raise
|
|
|
# Run the application only when executed as a script (not on import).
if __name__ == "__main__":

    main()
|
|