import os
import sys
import subprocess
import gradio as gr
from huggingface_hub import hf_hub_download
MODEL_REPO = "tencent/HunyuanVideo-Avatar"
BASE_DIR = os.getcwd()
WEIGHTS_DIR = os.path.join(BASE_DIR, "weights")
OUTPUT_BASEPATH = os.path.join(BASE_DIR, "results-poor")
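# Minimal subset of the tencent/HunyuanVideo-Avatar repo used here: the fp8 transformer
# checkpoint, VAE, text_encoder_2, whisper-tiny, and det_align (llava_llama_image is skipped).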
ESSENTIAL_PATHS = [
    # Transformers
    #"hunyuan-video-t2v-720p/transformers/mp_rank_00_model_states.pt",
    "hunyuan-video-t2v-720p/transformers/mp_rank_00_model_states_fp8.pt",
    #"hunyuan-video-t2v-720p/transformers/mp_rank_00_model_states_fp8_map.pt",
    # VAE
    "hunyuan-video-t2v-720p/vae/config.json",
    "hunyuan-video-t2v-720p/vae/pytorch_model.pt",
    # # llava_llama_image
    # "llava_llama_image/model-00001-of-00004.safetensors",
    # "llava_llama_image/model-00002-of-00004.safetensors",
    # "llava_llama_image/model-00003-of-00004.safetensors",
    # "llava_llama_image/model-00004-of-00004.safetensors",
    # "llava_llama_image/config.json",
    # text_encoder_2
    "text_encoder_2/config.json",
    "text_encoder_2/pytorch_model.bin",
    # whisper-tiny
    "whisper-tiny/config.json",
    "whisper-tiny/pytorch_model.bin",
    "whisper-tiny/tokenizer.json",
    "whisper-tiny/tokenizer_config.json",
    "whisper-tiny/vocab.json",
    # det_align
    "det_align/config.json",
    "det_align/pytorch_model.bin",
]
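# Download each essential file into weights/ckpts, skipping any that already exist locally.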
def download_ckpts():
    logs = []
    os.makedirs(os.path.join(WEIGHTS_DIR, "ckpts"), exist_ok=True)
    for path in ESSENTIAL_PATHS:
        local_path = os.path.join(WEIGHTS_DIR, "ckpts", path)
        if os.path.exists(local_path):
            logs.append(f"✅ Exists: {path}")
            continue
        os.makedirs(os.path.dirname(local_path), exist_ok=True)
        try:
            logs.append(f"⬇️ Downloading: {path}")
            hf_hub_download(
                repo_id=MODEL_REPO,
                filename="ckpts/" + path,
                local_dir=WEIGHTS_DIR,
                local_dir_use_symlinks=False,
            )
        except Exception as e:
            logs.append(f"❌ Failed: {path} - {str(e)}")
    return "\n".join(logs)
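# Launch hymm_sp/sample_gpu_poor.py (the fp8 / CPU-offload sampling script) as a subprocess
# against the downloaded checkpoint and return its stdout or stderr as a log string.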
def run_sample_gpu_poor():
    ckpt_fp8 = os.path.join(WEIGHTS_DIR, "ckpts", "hunyuan-video-t2v-720p", "transformers", "mp_rank_00_model_states_fp8.pt")
    if not os.path.isfile(ckpt_fp8):
        return f"❌ Missing checkpoint: {ckpt_fp8}"
    cmd = [
        "python3", "hymm_sp/sample_gpu_poor.py",
        "--input", "assets/test.csv",
        "--ckpt", ckpt_fp8,
        "--sample-n-frames", "129",
        "--seed", "128",
        "--image-size", "704",
        "--cfg-scale", "7.5",
        "--infer-steps", "50",
        "--use-deepcache", "1",
        "--flow-shift-eval-video", "5.0",
        "--save-path", OUTPUT_BASEPATH,
        "--use-fp8",
        "--cpu-offload",
        "--infer-min"
    ]
    env = os.environ.copy()
    env["PYTHONPATH"] = "./"
    env["MODEL_BASE"] = WEIGHTS_DIR
    env["CPU_OFFLOAD"] = "1"
    env["CUDA_VISIBLE_DEVICES"] = "0"
    result = subprocess.run(cmd, env=env, capture_output=True, text=True)
    if result.returncode != 0:
        return f"❌ sample_gpu_poor.py failed:\n{result.stderr}"
    return f"✅ sample_gpu_poor.py finished:\n{result.stdout}"
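# Gradio callback: download the checkpoints, then run inference, returning the combined logs.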
def download_and_run():
    log1 = download_ckpts()
    log2 = run_sample_gpu_poor()
    return f"{log1}\n\n---\n\n{log2}"
# Gradio UI
with gr.Blocks() as demo:
    gr.Markdown("## 📦 Download Selective Checkpoints + Run sample_gpu_poor.py")
    output = gr.Textbox(lines=30, label="Logs")
    button = gr.Button("🚀 Download + Run")
    button.click(fn=download_and_run, outputs=output)

if __name__ == "__main__":
    demo.launch(share=False)