Update brain_lazy.py

brain_lazy.py (CHANGED): +52 additions, −79 deletions
Old version (lines removed in this update are marked with -):

@@ -1,21 +1,21 @@
 """
-brain_lazy.py — Lazy loader with
 Purpose:
 - Start your app FAST (no startup timeout on Hugging Face).
 """

 import os
 import time
 import threading
 import importlib
-import subprocess
 from typing import Optional, Any

 # -----------------------------------------------------------------------------
-# Persistent cache
 # -----------------------------------------------------------------------------
 CACHE_DIR = "/home/user/app/cache"
 os.environ.setdefault("TRANSFORMERS_CACHE", CACHE_DIR)
@@ -30,140 +30,113 @@ _lock = threading.Lock()
 _is_loading = False
 _is_ready = False
 _last_error: Optional[str] = None

-# Max wait for background preload before returning warming-up
 PROXY_WAIT_SECONDS = 25

-# List of heavy packages to install automatically
-HEAVY_PACKAGES = ["torch", "torchvision", "timm", "diffusers", "faiss-cpu"]
-
 # -----------------------------------------------------------------------------
-#
 # -----------------------------------------------------------------------------
-def _install_heavy_packages():
-    global _packages_installed

 # -----------------------------------------------------------------------------
 # Brain loader
 # -----------------------------------------------------------------------------
 def _load_brain_blocking() -> Optional[Any]:
-    """
-    Imports multimodular_modul_v7 and performs light warm-up.
-    """
     global _brain, _is_ready, _is_loading, _last_error
     with _lock:
         if _brain is not None:
             return _brain
         if _is_loading:
-            return None

     _is_loading = True
     _last_error = None
     start = time.time()
     try:
-
-        if not _packages_installed:
-            _install_heavy_packages()
-
-        print("⏳ [brain_lazy] Importing multimodular_modul_v7 ...")
         brain = importlib.import_module("multimodular_modul_v7")

-        # Optional warm-up
         warm_started = False
         if hasattr(brain, "init"):
             try:
                 brain.init()
                 warm_started = True
-                print("✅ [brain_lazy] brain.init() finished.")
             except Exception as e:
                 print(f"⚠️ brain.init() failed: {e}")

         if hasattr(brain, "warm_up"):
             try:
                 brain.warm_up()
                 warm_started = True
-                print("✅ [brain_lazy] brain.warm_up() finished.")
             except Exception as e:
                 print(f"⚠️ brain.warm_up() failed: {e}")

         if not warm_started and hasattr(brain, "process_input"):
             try:
                 _ = brain.process_input("ping")
-                print("✅ [brain_lazy] minimal warm-up via process_input('ping') done.")
             except Exception as e:
                 print(f"⚠️ Minimal warm-up failed: {e}")

         _brain = brain
         _is_ready = True
         print(f"✅ [brain_lazy] Brain loaded in {time.time() - start:.2f}s")
         return _brain
     except Exception as e:
         _last_error = str(e)
         print(f"❌ [brain_lazy] Brain load failed: {e}")
         return None
     finally:
         _is_loading = False

 # -----------------------------------------------------------------------------
-# Background
 # -----------------------------------------------------------------------------
 def _background_startup():
-    """
-    Run immediately on import: install packages and preload brain asynchronously.
-    """
-    _install_heavy_packages()
     _load_brain_blocking()

 threading.Thread(target=_background_startup, daemon=True).start()

-# -----------------------------------------------------------------------------
-# Helpers
-# -----------------------------------------------------------------------------
-def is_ready() -> bool:
-    return _is_ready
-
-def last_error() -> Optional[str]:
-    return _last_error
-
-def _ensure_loaded_with_wait(timeout_s: float) -> Optional[Any]:
-    if _brain is not None and _is_ready:
-        return _brain
-
-    if not _is_loading and _brain is None:
-        threading.Thread(target=_load_brain_blocking, daemon=True).start()
-
-    waited = 0.0
-    interval = 0.25
-    while waited < timeout_s:
-        if _brain is not None and _is_ready:
-            return _brain
-        time.sleep(interval)
-        waited += interval
-    return None
-
-def _warming_up_message(op: str) -> Any:
-    return {
-        "status": "warming_up",
-        "operation": op,
-        "detail": "CHB is installing packages and loading models in the background. Please retry shortly.",
-        "ready": _is_ready,
-        "error": _last_error,
-    }
-
 # -----------------------------------------------------------------------------
 # Public proxy API
 # -----------------------------------------------------------------------------

New version (lines added in this update are marked with +):

@@ -1,21 +1,21 @@
 """
+brain_lazy.py — Lazy loader with progress indicator
 Purpose:
 - Start your app FAST (no startup timeout on Hugging Face).
+- Preload multimodular_modul_v7 in the background after startup.
+- Keep the original module untouched.
+- Shows a dynamic “loading step” for better UX.
+- Does NOT attempt any runtime package installation.
 """

 import os
 import time
 import threading
 import importlib
 from typing import Optional, Any

 # -----------------------------------------------------------------------------
+# Persistent cache
 # -----------------------------------------------------------------------------
 CACHE_DIR = "/home/user/app/cache"
 os.environ.setdefault("TRANSFORMERS_CACHE", CACHE_DIR)

@@ -30,140 +30,113 @@ _lock = threading.Lock()
 _is_loading = False
 _is_ready = False
 _last_error: Optional[str] = None
+_loading_step: Optional[str] = None

 PROXY_WAIT_SECONDS = 25

 # -----------------------------------------------------------------------------
+# Internal helpers
 # -----------------------------------------------------------------------------
+def _set_step(step: str):
+    global _loading_step
+    _loading_step = step
+    print(f"⏳ [brain_lazy] {step}")
+
+def _warming_up_message(op: str) -> Any:
+    return {
+        "status": "warming_up",
+        "operation": op,
+        "detail": f"CHB is loading models in the background. Current step: {_loading_step or 'starting'}",
+        "ready": _is_ready,
+        "error": _last_error,
+    }
+
+def is_ready() -> bool:
+    return _is_ready
+
+def last_error() -> Optional[str]:
+    return _last_error
+
+def _ensure_loaded_with_wait(timeout_s: float) -> Optional[Any]:
+    if _brain is not None and _is_ready:
+        return _brain
+
+    if not _is_loading and _brain is None:
+        threading.Thread(target=_load_brain_blocking, daemon=True).start()
+
+    waited = 0.0
+    interval = 0.25
+    while waited < timeout_s:
+        if _brain is not None and _is_ready:
+            return _brain
+        time.sleep(interval)
+        waited += interval
+    return None

 # -----------------------------------------------------------------------------
 # Brain loader
 # -----------------------------------------------------------------------------
 def _load_brain_blocking() -> Optional[Any]:
     global _brain, _is_ready, _is_loading, _last_error
     with _lock:
         if _brain is not None:
             return _brain
         if _is_loading:
+            return None

     _is_loading = True
     _last_error = None
     start = time.time()
     try:
+        _set_step("importing multimodular_modul_v7")
         brain = importlib.import_module("multimodular_modul_v7")

         warm_started = False
+
         if hasattr(brain, "init"):
+            _set_step("running brain.init()")
             try:
                 brain.init()
                 warm_started = True
             except Exception as e:
                 print(f"⚠️ brain.init() failed: {e}")

         if hasattr(brain, "warm_up"):
+            _set_step("running brain.warm_up()")
             try:
                 brain.warm_up()
                 warm_started = True
             except Exception as e:
                 print(f"⚠️ brain.warm_up() failed: {e}")

         if not warm_started and hasattr(brain, "process_input"):
+            _set_step("minimal warm-up via process_input('ping')")
             try:
                 _ = brain.process_input("ping")
             except Exception as e:
                 print(f"⚠️ Minimal warm-up failed: {e}")

         _brain = brain
         _is_ready = True
+        _set_step("ready")
         print(f"✅ [brain_lazy] Brain loaded in {time.time() - start:.2f}s")
         return _brain
     except Exception as e:
         _last_error = str(e)
+        _set_step("failed")
         print(f"❌ [brain_lazy] Brain load failed: {e}")
         return None
     finally:
         _is_loading = False

 # -----------------------------------------------------------------------------
+# Background startup
 # -----------------------------------------------------------------------------
 def _background_startup():
     _load_brain_blocking()

 threading.Thread(target=_background_startup, daemon=True).start()

 # -----------------------------------------------------------------------------
 # Public proxy API
 # -----------------------------------------------------------------------------
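
The helpers above are what the "Public proxy API" section (its body is not part of the hunks shown here) is built on: try to obtain the brain within PROXY_WAIT_SECONDS, and fall back to the structured warming-up payload otherwise. A minimal sketch of such a proxy wrapper, assuming the brain module exposes process_input() as hinted by the warm-up code; the wrapper name and signature below are illustrative, not code from this commit:

def process_input(text: str):
    # Hypothetical proxy wrapper; the real proxy functions are outside the hunks shown above.
    # Wait briefly for the background loader instead of blocking the request indefinitely.
    brain = _ensure_loaded_with_wait(PROXY_WAIT_SECONDS)
    if brain is None:
        # Not ready yet: return the warming-up payload, which now reports the current _loading_step.
        return _warming_up_message("process_input")
    return brain.process_input(text)

A caller that receives a dict with "status": "warming_up" can retry after a short delay or surface the "detail" string, which after this change names the current loading step instead of the old generic "installing packages" message.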