Update app.py
app.py CHANGED

@@ -22,7 +22,7 @@ sys.excepthook = _crash_trap
 # ============================================================
 
 import gradio as gr
-import
+from spaces import GPU  # <-- explicit import so startup checker can see it
 from huggingface_hub import snapshot_download
 from loguru import logger
 import torch, torchaudio

@@ -291,13 +291,19 @@ def preprocess_video(in_path: str) -> Tuple[str, float]:
     final_dur = min(dur, float(MAX_SECS))
     return str(processed), final_dur
 
+# ========= ZeroGPU marker (so startup checker is happy) =========
+@GPU(duration=5)
+def _zgpu_marker(_: int = 0) -> int:
+    """No-op; only to advertise that this Space has GPU-decorated functions."""
+    return _
+
 # ========= Inference (ZeroGPU) =========
-@
+@GPU(duration=ZEROGPU_DURATION)
 @torch.inference_mode()
 def run_model(video_path: str, prompt_text: str,
               guidance_scale: float = 4.5,
               num_inference_steps: int = 50,
-              sample_nums: int = 1)
+              sample_nums: int = 1):
     """
     Native inference (no shell). Returns ([wav_paths], sample_rate).
     """
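For context on why this change helps: ZeroGPU Spaces grant GPU time per call to functions decorated with `spaces.GPU`, and the startup checker imports app.py and looks for such decorated functions, which is why the patch makes the import explicit and adds a tiny decorated marker. The sketch below shows that pattern in isolation; it is a minimal, assumed example (the `ZEROGPU_DURATION` value, the prompt-only signature, and the Gradio wiring are illustrative, not this Space's actual code):

import gradio as gr
import torch
from spaces import GPU  # the `spaces` package is preinstalled on ZeroGPU hardware

ZEROGPU_DURATION = 120  # assumed value: seconds of GPU time requested per call

@GPU(duration=ZEROGPU_DURATION)  # applied at import time so the startup checker can see it
@torch.inference_mode()
def run_model(prompt: str) -> str:
    # A real Space would move its model to CUDA here; this stub only reports the device.
    device = "cuda" if torch.cuda.is_available() else "cpu"
    return f"would run inference for {prompt!r} on {device}"

demo = gr.Interface(fn=run_model, inputs="text", outputs="text")

if __name__ == "__main__":
    demo.launch()

The `_zgpu_marker` no-op in the diff follows the same idea: decorating a trivial five-second function guarantees the checker finds at least one GPU-decorated function, as its docstring says, independently of the heavier `run_model` path.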