Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -36,7 +36,7 @@ OUTPUTS_DIR = Path(os.environ.get("OUTPUTS_DIR", str(ROOT / "outputs" / "autosav
 OUTPUTS_DIR.mkdir(parents=True, exist_ok=True)
 
 SPACE_TITLE = "🎵 ShortiFoley — HunyuanVideo-Foley"
-SPACE_TAGLINE = "
+SPACE_TAGLINE = "Bring your videos to life with AI-powered Foley"
 WATERMARK_NOTE = "Made with ❤️ by bilsimaging.com"
 
 # ZeroGPU limit
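Note on the `# ZeroGPU limit` constant that follows this hunk: on ZeroGPU Spaces, GPU time per call is normally capped by decorating the heavy function with `spaces.GPU`. A minimal sketch, assuming a hypothetical `GPU_DURATION` of 120 seconds (the actual value in app.py is not visible in this diff):

    import spaces  # provided automatically on ZeroGPU Spaces

    GPU_DURATION = 120  # hypothetical cap in seconds; the real constant is not shown here

    @spaces.GPU(duration=GPU_DURATION)
    def run_inference_on_gpu(video_path: str, prompt: str):
        # the decorator requests a GPU slice for at most GPU_DURATION seconds
        # and releases it when the function returns
        ...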
@@ -262,7 +262,7 @@ def infer_single_video(
 
 
 # -------------
-# Gradio UI
+# Gradio UI
 # -------------
 def _about_html() -> str:
     return f"""
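`_about_html()` returns an HTML fragment for the UI. In a Gradio Blocks app such a helper is typically rendered with a `gr.HTML` component; a small sketch under that assumption (the tab name and the stub body are illustrative, not copied from app.py):

    import gradio as gr

    def _about_html() -> str:
        # stand-in for the real helper defined in app.py
        return "<h2>🎵 ShortiFoley — HunyuanVideo-Foley</h2>"

    with gr.Blocks() as demo:
        with gr.Tab("About"):
            gr.HTML(_about_html())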
@@ -459,7 +459,7 @@ Works great with media-automation in tools like **n8n**: call `load_model_tool`
 """
     )
 
-# ---- REST + MCP endpoints
+# ---- REST + MCP endpoints ----
 def _download_to_tmp(url: str) -> str:
     try:
         import requests
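`_download_to_tmp` is only partially visible in this hunk. A common implementation streams the remote file into a temporary path with `requests` and `tempfile`; a sketch under that assumption (timeout, chunk size, and the `.mp4` suffix are illustrative):

    import tempfile
    import requests

    def _download_to_tmp(url: str) -> str:
        # stream the remote clip into a temporary file and return its path
        resp = requests.get(url, stream=True, timeout=60)
        resp.raise_for_status()
        with tempfile.NamedTemporaryFile(delete=False, suffix=".mp4") as tmp:
            for chunk in resp.iter_content(chunk_size=1 << 20):
                tmp.write(chunk)
            return tmp.name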
@@ -499,7 +499,7 @@ Works great with media-automation in tools like **n8n**: call `load_model_tool`
     sample_nums: int = 1,
 ) -> Dict[str, List[str]]:
     if _model_dict is None or _cfg is None:
-        msg = auto_load_models(device_str="cpu")
+        msg = auto_load_models(device_str="cpu")
         if not str(msg).startswith("✅"):
             raise RuntimeError(msg)
     local = _normalize_video_input(video_url_or_b64)
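The endpoint lazily loads the model on first use via `auto_load_models(device_str="cpu")` and then resolves `video_url_or_b64` to a local file with `_normalize_video_input`, which is not shown in the diff. A plausible reading is that it accepts either an http(s) URL or a base64 payload and always returns a local path; a sketch under that assumption (the base64 branch and the `.mp4` suffix are assumptions):

    import base64
    import tempfile

    def _normalize_video_input(video_url_or_b64: str) -> str:
        # URL -> download to a temp file; otherwise treat the string as base64-encoded video
        if video_url_or_b64.startswith(("http://", "https://")):
            return _download_to_tmp(video_url_or_b64)
        raw = base64.b64decode(video_url_or_b64)
        with tempfile.NamedTemporaryFile(delete=False, suffix=".mp4") as tmp:
            tmp.write(raw)
            return tmp.name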
@@ -546,7 +546,7 @@ if __name__ == "__main__":
     logger.info("===== Application Startup =====\n")
     prepare_once()
 
-    # Probe imports
+    # Probe imports
     sys.path.append(str(REPO_DIR))
     try:
         from hunyuanvideo_foley.utils.model_utils import load_model, denoise_process  # noqa: F401
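The probe-import block checks at startup that the vendored `hunyuanvideo_foley` package resolves before the UI is built; the hunk cuts off before the `except` clause. A sketch of the full pattern, with the failure handling assumed rather than copied from the file:

    import logging
    import sys
    from pathlib import Path

    logger = logging.getLogger(__name__)
    REPO_DIR = Path("HunyuanVideo-Foley")  # assumption: path of the cloned upstream repo

    sys.path.append(str(REPO_DIR))
    try:
        # probe that the heavy modules import cleanly before building the UI
        from hunyuanvideo_foley.utils.model_utils import load_model, denoise_process  # noqa: F401
    except Exception as exc:
        # assumed handling: log the failure so the Space surfaces a useful startup error
        logger.error("hunyuanvideo_foley import probe failed: %s", exc)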
@@ -557,7 +557,7 @@
 
     ui = create_ui()
 
-    # Enable MCP server
+    # Enable MCP server
     ui.launch(
         server_name="0.0.0.0",
         share=False,
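The `# Enable MCP server` comment suggests the launch call turns on Gradio's MCP endpoint, which recent Gradio versions expose through the `mcp_server` flag on `launch()`. The hunk is truncated after `share=False`, so the remaining keyword arguments below are assumptions:

    ui.launch(
        server_name="0.0.0.0",
        share=False,
        mcp_server=True,   # assumption: serves the app's tools over MCP alongside the UI
        show_error=True,   # hypothetical extra kwarg, not visible in this hunk
    )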