""" | |
brain_lazy.py — Lazy loader with progress indicator | |
Purpose: | |
- Start your app FAST (no startup timeout on Hugging Face). | |
- Preload multimodular_modul_v7 in the background after startup. | |
- Keep the original module untouched. | |
- Shows a dynamic “loading step” for better UX. | |
- Does NOT attempt any runtime package installation. | |
""" | |
import os
import time
import threading
import importlib
from typing import Optional, Any

# -----------------------------------------------------------------------------
# Persistent cache
# -----------------------------------------------------------------------------
# Point both Hugging Face cache env vars at persistent app storage so model
# downloads survive restarts. setdefault keeps any value already configured.
CACHE_DIR = "/home/user/app/cache"
for _cache_env in ("TRANSFORMERS_CACHE", "HF_HOME"):
    os.environ.setdefault(_cache_env, CACHE_DIR)
os.makedirs(CACHE_DIR, exist_ok=True)
# -----------------------------------------------------------------------------
# Loader flags (module-global state shared by the loader thread and proxies)
# -----------------------------------------------------------------------------
# The loaded multimodular_modul_v7 module object; None until the load finishes.
_brain: Optional[Any] = None
# Serializes the check-and-set in _load_brain_blocking so only one thread loads.
_lock = threading.Lock()
# True while a background load attempt is in progress.
_is_loading: bool = False
# True once the module is imported and warm-up has been attempted.
_is_ready: bool = False
# str(exception) of the last failed load attempt, if any.
_last_error: Optional[str] = None
# Human-readable current load phase, surfaced in warming-up responses.
_loading_step: Optional[str] = None
# How long (seconds) a public proxy call waits for the brain before returning
# a "warming_up" placeholder payload instead of a real result.
PROXY_WAIT_SECONDS = 25
# -----------------------------------------------------------------------------
# Internal helpers
# -----------------------------------------------------------------------------
def _set_step(step: str) -> None:
    """Record the current loading phase and echo it to the log."""
    global _loading_step
    _loading_step = step
    print(f"⏳ [brain_lazy] {step}")
def _warming_up_message(op: str) -> Any:
    """Build the placeholder payload returned while the brain is still loading.

    Includes the current loading step plus ready/error flags so the caller
    can surface progress to the user.
    """
    current_step = _loading_step or "starting"
    payload = {
        "status": "warming_up",
        "operation": op,
        "detail": f"CHB is loading models in the background. Current step: {current_step}",
        "ready": _is_ready,
        "error": _last_error,
    }
    return payload
def is_ready() -> bool:
    """Report whether the background load has completed."""
    ready = _is_ready
    return ready
def last_error() -> Optional[str]:
    """Return the error message from the most recent failed load, or None."""
    err = _last_error
    return err
def _ensure_loaded_with_wait(timeout_s: float) -> Optional[Any]:
    """Return the loaded brain module, waiting up to timeout_s seconds.

    Starts a background load if none is running, then polls until the brain
    is ready or the deadline passes. Returns None on timeout so the caller
    can respond with a warming-up placeholder instead of blocking forever.

    Fix: the original summed nominal sleep intervals (0.25s each), so
    scheduler jitter could stretch the real wait well past timeout_s; we
    now poll against a time.monotonic() deadline.
    """
    if _brain is not None and _is_ready:
        return _brain
    # Kick off a loader if none is running. A rare duplicate thread here is
    # harmless: _load_brain_blocking serializes its check-and-set with _lock,
    # so the second thread returns immediately without loading.
    if not _is_loading and _brain is None:
        threading.Thread(target=_load_brain_blocking, daemon=True).start()
    deadline = time.monotonic() + timeout_s
    while time.monotonic() < deadline:
        if _brain is not None and _is_ready:
            return _brain
        time.sleep(0.25)
    return None
# -----------------------------------------------------------------------------
# Brain loader
# -----------------------------------------------------------------------------
def _load_brain_blocking() -> Optional[Any]:
    """Import and warm up multimodular_modul_v7; return it, or None on failure.

    Thread-safe entry point: the flag check-and-set below is serialized by
    _lock, so exactly one caller performs the load. Concurrent callers get
    None immediately (they are expected to poll _is_ready/_brain instead).
    """
    global _brain, _is_ready, _is_loading, _last_error
    with _lock:
        # Already loaded by an earlier call — nothing to do.
        if _brain is not None:
            return _brain
        # Another thread is mid-load; do not start a second load.
        if _is_loading:
            return None
        _is_loading = True
        _last_error = None
    # The slow work below runs OUTSIDE _lock so status checks stay cheap.
    start = time.time()
    try:
        _set_step("importing multimodular_modul_v7")
        brain = importlib.import_module("multimodular_modul_v7")
        # Warm-up is best-effort: try init(), then warm_up(); fall back to a
        # minimal process_input('ping') only if neither hook succeeded.
        warm_started = False
        if hasattr(brain, "init"):
            _set_step("running brain.init()")
            try:
                brain.init()
                warm_started = True
            except Exception as e:
                # Warm-up failures are non-fatal; the module is still usable.
                print(f"⚠️ brain.init() failed: {e}")
        if hasattr(brain, "warm_up"):
            _set_step("running brain.warm_up()")
            try:
                brain.warm_up()
                warm_started = True
            except Exception as e:
                print(f"⚠️ brain.warm_up() failed: {e}")
        if not warm_started and hasattr(brain, "process_input"):
            _set_step("minimal warm-up via process_input('ping')")
            try:
                _ = brain.process_input("ping")
            except Exception as e:
                print(f"⚠️ Minimal warm-up failed: {e}")
        # Publish the module and flip readiness only after warm-up attempts.
        _brain = brain
        _is_ready = True
        _set_step("ready")
        print(f"✅ [brain_lazy] Brain loaded in {time.time() - start:.2f}s")
        return _brain
    except Exception as e:
        # Import itself failed (e.g. missing dependency): record for status.
        _last_error = str(e)
        _set_step("failed")
        print(f"❌ [brain_lazy] Brain load failed: {e}")
        return None
    finally:
        # The early returns inside the with-block above happen BEFORE this
        # try, so only the thread that actually attempted a load clears the
        # flag — it never clobbers another thread's in-progress state.
        _is_loading = False
# -----------------------------------------------------------------------------
# Background startup
# -----------------------------------------------------------------------------
def _background_startup() -> None:
    """Thread target: perform the initial brain load off the import thread."""
    _load_brain_blocking()

# Start loading immediately at import time so the app can bind its port fast
# while models download in the background (daemon thread: never blocks exit).
threading.Thread(target=_background_startup, daemon=True).start()
# -----------------------------------------------------------------------------
# Public proxy API
# -----------------------------------------------------------------------------
def process_input(text: str) -> Any:
    """Proxy brain.process_input; return a warming-up stub if not ready in time."""
    brain = _ensure_loaded_with_wait(PROXY_WAIT_SECONDS)
    if brain is None:
        return _warming_up_message("process_input")
    return brain.process_input(text)
def search_kb(query: str) -> Any:
    """Proxy brain.search_kb; return a warming-up stub if not ready in time."""
    brain = _ensure_loaded_with_wait(PROXY_WAIT_SECONDS)
    return _warming_up_message("search_kb") if brain is None else brain.search_kb(query)
def upload_media(file_path: str) -> Any:
    """Proxy brain.upload_media; return a warming-up stub if not ready in time."""
    brain = _ensure_loaded_with_wait(PROXY_WAIT_SECONDS)
    return _warming_up_message("upload_media") if brain is None else brain.upload_media(file_path)
def backup_brain() -> Any:
    """Proxy brain.backup_brain; return a warming-up stub if not ready in time."""
    brain = _ensure_loaded_with_wait(PROXY_WAIT_SECONDS)
    return _warming_up_message("backup_brain") if brain is None else brain.backup_brain()
def restore_brain() -> Any:
    """Proxy brain.restore_brain; return a warming-up stub if not ready in time."""
    brain = _ensure_loaded_with_wait(PROXY_WAIT_SECONDS)
    return _warming_up_message("restore_brain") if brain is None else brain.restore_brain()
def show_creative_skills() -> Any:
    """Proxy brain.show_creative_skills; return a warming-up stub if not ready in time."""
    brain = _ensure_loaded_with_wait(PROXY_WAIT_SECONDS)
    return _warming_up_message("show_creative_skills") if brain is None else brain.show_creative_skills()
def sync_status() -> Any:
    """Proxy brain.sync_status; return a warming-up stub if not ready in time."""
    brain = _ensure_loaded_with_wait(PROXY_WAIT_SECONDS)
    return _warming_up_message("sync_status") if brain is None else brain.sync_status()