File size: 6,131 Bytes
8ee354b
b29d610
43155d4
 
b29d610
 
 
 
8ee354b
 
 
 
43155d4
 
 
 
 
b29d610
43155d4
 
 
 
 
8ee354b
43155d4
 
 
310c81e
43155d4
8ee354b
 
43155d4
b29d610
8ee354b
43155d4
 
310c81e
b29d610
310c81e
b29d610
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
310c81e
43155d4
310c81e
43155d4
310c81e
43155d4
8ee354b
43155d4
 
 
b29d610
43155d4
 
 
 
 
b29d610
43155d4
 
 
b29d610
43155d4
b29d610
43155d4
 
 
 
310c81e
43155d4
 
b29d610
43155d4
 
 
 
310c81e
43155d4
 
b29d610
43155d4
 
 
310c81e
43155d4
 
8ee354b
b29d610
43155d4
 
 
 
b29d610
43155d4
 
 
8ee354b
43155d4
310c81e
b29d610
310c81e
 
43155d4
 
310c81e
43155d4
 
310c81e
43155d4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
"""
brain_lazy.py — Lazy loader with progress indicator
Purpose:
- Start your app FAST (no startup timeout on Hugging Face).
- Preload multimodular_modul_v7 in the background after startup.
- Keep the original module untouched.
- Shows a dynamic “loading step” for better UX.
- Does NOT attempt any runtime package installation.
"""

import os
import time
import threading
import importlib
from typing import Optional, Any

# -----------------------------------------------------------------------------
# Persistent cache
# -----------------------------------------------------------------------------
# Single persistent directory so model downloads survive restarts.
CACHE_DIR = "/home/user/app/cache"
# setdefault: only fill these in when the environment did not already set them.
# NOTE(review): TRANSFORMERS_CACHE appears deprecated in newer transformers in
# favor of HF_HOME — kept here for compatibility with older versions; confirm.
os.environ.setdefault("TRANSFORMERS_CACHE", CACHE_DIR)
os.environ.setdefault("HF_HOME", CACHE_DIR)
os.makedirs(CACHE_DIR, exist_ok=True)

# -----------------------------------------------------------------------------
# Loader flags
# -----------------------------------------------------------------------------
# Shared state between the background loader thread and the proxy API.
# These are plain module globals; only the load itself is serialized by _lock.
_brain: Optional[Any] = None          # the imported multimodular_modul_v7 module, once loaded
_lock = threading.Lock()              # serializes _load_brain_blocking across threads
_is_loading: bool = False             # True while a load attempt is in flight
_is_ready: bool = False               # True once _brain is imported and warmed up
_last_error: Optional[str] = None     # message of the most recent load failure, if any
_loading_step: Optional[str] = None   # human-readable current step, surfaced to callers

# Maximum seconds a proxy call will block waiting for the brain to come up.
PROXY_WAIT_SECONDS = 25

# -----------------------------------------------------------------------------
# Internal helpers
# -----------------------------------------------------------------------------
def _set_step(step_name: str) -> None:
    """Publish *step_name* as the current loading phase and echo it to stdout."""
    global _loading_step
    _loading_step = step_name
    print(f"⏳ [brain_lazy] {step_name}")

def _warming_up_message(op: str) -> Any:
    """Build the standard 'still warming up' payload returned by proxy calls."""
    current = _loading_step or 'starting'
    payload = {
        "status": "warming_up",
        "operation": op,
        "detail": f"CHB is loading models in the background. Current step: {current}",
        "ready": _is_ready,
        "error": _last_error,
    }
    return payload

def is_ready() -> bool:
    """Report whether the brain module has finished loading and warming up."""
    return bool(_is_ready)

def last_error() -> Optional[str]:
    """Return the most recent load-failure message, or None if none recorded."""
    return _last_error

def _ensure_loaded_with_wait(timeout_s: float) -> Optional[Any]:
    """Return the loaded brain module, waiting up to *timeout_s* seconds.

    Starts a background load if none is in flight. Returns None when the
    brain is not ready within the timeout, or when a completed load attempt
    has recorded an error (fail fast instead of burning the whole timeout —
    previously every proxy call stalled the full timeout after a permanent
    import failure).
    """
    if _brain is not None and _is_ready:
        return _brain

    if not _is_loading and _brain is None:
        threading.Thread(target=_load_brain_blocking, daemon=True).start()

    # Poll against a monotonic deadline rather than accumulating sleep time,
    # so scheduler jitter cannot stretch the effective timeout.
    deadline = time.monotonic() + timeout_s
    interval = 0.25
    while time.monotonic() < deadline:
        if _brain is not None and _is_ready:
            return _brain
        # A finished attempt that errored will not recover on its own;
        # return early so the caller gets the warming-up/error payload now.
        if _last_error is not None and not _is_loading:
            return None
        time.sleep(interval)
    return None

# -----------------------------------------------------------------------------
# Brain loader
# -----------------------------------------------------------------------------
def _load_brain_blocking() -> Optional[Any]:
    """Import and warm up multimodular_modul_v7, updating module-level state.

    Runs under _lock so only one thread performs the load; a concurrent
    caller either gets the already-loaded module or None (load in flight).
    Returns the module on success, None on failure or when another load is
    already running. Warm-up hook failures are logged but non-fatal — only
    the import itself can mark the load as failed.
    """
    global _brain, _is_ready, _is_loading, _last_error
    with _lock:
        # Fast paths: already loaded, or another thread is mid-load.
        if _brain is not None:
            return _brain
        if _is_loading:
            return None

        _is_loading = True
        _last_error = None
        start = time.time()
        try:
            _set_step("importing multimodular_modul_v7")
            brain = importlib.import_module("multimodular_modul_v7")

            # Track whether any explicit warm-up hook ran, so we only fall
            # back to the 'ping' probe when neither init nor warm_up exists.
            warm_started = False

            if hasattr(brain, "init"):
                _set_step("running brain.init()")
                try:
                    brain.init()
                    warm_started = True
                except Exception as e:
                    # Best-effort: a failed init does not abort the load.
                    print(f"⚠️ brain.init() failed: {e}")

            if hasattr(brain, "warm_up"):
                _set_step("running brain.warm_up()")
                try:
                    brain.warm_up()
                    warm_started = True
                except Exception as e:
                    print(f"⚠️ brain.warm_up() failed: {e}")

            if not warm_started and hasattr(brain, "process_input"):
                _set_step("minimal warm-up via process_input('ping')")
                try:
                    _ = brain.process_input("ping")
                except Exception as e:
                    print(f"⚠️ Minimal warm-up failed: {e}")

            # Publish the module only after warm-up attempts complete.
            _brain = brain
            _is_ready = True
            _set_step("ready")
            print(f"✅ [brain_lazy] Brain loaded in {time.time() - start:.2f}s")
            return _brain
        except Exception as e:
            # Import (or anything outside the inner try blocks) failed;
            # record the message so proxies can surface it.
            _last_error = str(e)
            _set_step("failed")
            print(f"❌ [brain_lazy] Brain load failed: {e}")
            return None
        finally:
            # Always clear the in-flight flag, success or failure.
            _is_loading = False

# -----------------------------------------------------------------------------
# Background startup
# -----------------------------------------------------------------------------
def _background_startup():
    """Thread target: perform the blocking load off the import path."""
    _load_brain_blocking()

# Kick off the load immediately at import time, in a daemon thread, so the
# importing app starts fast and the process can exit without joining it.
threading.Thread(target=_background_startup, daemon=True).start()

# -----------------------------------------------------------------------------
# Public proxy API
# -----------------------------------------------------------------------------
def process_input(text: str) -> Any:
    """Forward *text* to the brain's process_input, or report warm-up status."""
    module = _ensure_loaded_with_wait(PROXY_WAIT_SECONDS)
    if module is not None:
        return module.process_input(text)
    return _warming_up_message("process_input")

def search_kb(query: str) -> Any:
    """Forward *query* to the brain's search_kb, or report warm-up status."""
    module = _ensure_loaded_with_wait(PROXY_WAIT_SECONDS)
    if module is not None:
        return module.search_kb(query)
    return _warming_up_message("search_kb")

def upload_media(file_path: str) -> Any:
    """Forward *file_path* to the brain's upload_media, or report warm-up status."""
    module = _ensure_loaded_with_wait(PROXY_WAIT_SECONDS)
    if module is not None:
        return module.upload_media(file_path)
    return _warming_up_message("upload_media")

def backup_brain() -> Any:
    """Invoke the brain's backup_brain, or report warm-up status."""
    module = _ensure_loaded_with_wait(PROXY_WAIT_SECONDS)
    if module is not None:
        return module.backup_brain()
    return _warming_up_message("backup_brain")

def restore_brain() -> Any:
    """Invoke the brain's restore_brain, or report warm-up status."""
    module = _ensure_loaded_with_wait(PROXY_WAIT_SECONDS)
    if module is not None:
        return module.restore_brain()
    return _warming_up_message("restore_brain")

def show_creative_skills() -> Any:
    """Invoke the brain's show_creative_skills, or report warm-up status."""
    module = _ensure_loaded_with_wait(PROXY_WAIT_SECONDS)
    if module is not None:
        return module.show_creative_skills()
    return _warming_up_message("show_creative_skills")

def sync_status() -> Any:
    """Invoke the brain's sync_status, or report warm-up status."""
    module = _ensure_loaded_with_wait(PROXY_WAIT_SECONDS)
    if module is not None:
        return module.sync_status()
    return _warming_up_message("sync_status")