rahul7star committed on
Commit 0e7cb07 · verified · 1 Parent(s): 6810bbb

Update app.py

Files changed (1)
app.py +69 -23
app.py CHANGED
@@ -1,34 +1,50 @@
 import os
+import sys
 import subprocess
 import time
 from huggingface_hub import hf_hub_download
 
-MODEL_REPO = "tencent/HunyuanVideo-Avatar"
-OUTPUT_DIR = "results-poor"
-ASSETS_CSV = "assets/test.csv"
+BASE_DIR = os.getcwd()
+WEIGHTS_DIR = os.path.join(BASE_DIR, "weights")
+OUTPUT_BASEPATH = os.path.join(BASE_DIR, "results-poor")
 
+# Download specific files from Hugging Face repo
 def download_checkpoints():
-    print("⬇️ Downloading specific checkpoint files from Tencent repo...")
+    os.makedirs(WEIGHTS_DIR, exist_ok=True)
+    print("⬇️ Downloading necessary checkpoint files...")
 
-    fp8_ckpt = hf_hub_download(
-        repo_id=MODEL_REPO,
-        filename="ckpts/hunyuan-video-t2v-720p/transformers/mp_rank_00_model_states_fp8.pt"
-    )
+    try:
+        # Download FP8 checkpoint
+        checkpoint_fp8 = hf_hub_download(
+            repo_id="tencent/HunyuanVideo-Avatar",
+            filename="ckpts/hunyuan-video-t2v-720p/transformers/mp_rank_00_model_states_fp8.pt",
+            cache_dir=WEIGHTS_DIR,
+            local_dir=WEIGHTS_DIR,
+            local_dir_use_symlinks=False
+        )
 
-    fp32_ckpt = hf_hub_download(
-        repo_id=MODEL_REPO,
-        filename="ckpts/hunyuan-video-t2v-720p/transformers/mp_rank_00_model_states.pt"
-    )
+        # Download normal checkpoint for Flask/Gradio UI
+        checkpoint = hf_hub_download(
+            repo_id="tencent/HunyuanVideo-Avatar",
+            filename="ckpts/hunyuan-video-t2v-720p/transformers/mp_rank_00_model_states.pt",
+            cache_dir=WEIGHTS_DIR,
+            local_dir=WEIGHTS_DIR,
+            local_dir_use_symlinks=False
+        )
 
-    print("✅ Checkpoints downloaded.")
-    return fp8_ckpt, fp32_ckpt
+        return checkpoint, checkpoint_fp8
 
-def run_sample_gpu_poor(fp8_ckpt_path):
+    except Exception as e:
+        print(f"❌ Error during checkpoint download: {e}")
+        sys.exit(1)
+
+def run_sample_gpu_poor(checkpoint_fp8_path):
     print("🎬 Running sample_gpu_poor.py...")
+
     cmd = [
         "python3", "hymm_sp/sample_gpu_poor.py",
-        "--input", ASSETS_CSV,
-        "--ckpt", fp8_ckpt_path,
+        "--input", "assets/test.csv",
+        "--ckpt", checkpoint_fp8_path,
         "--sample-n-frames", "129",
         "--seed", "128",
         "--image-size", "704",
@@ -36,7 +52,7 @@ def run_sample_gpu_poor(fp8_ckpt_path):
         "--infer-steps", "50",
         "--use-deepcache", "1",
         "--flow-shift-eval-video", "5.0",
-        "--save-path", OUTPUT_DIR,
+        "--save-path", OUTPUT_BASEPATH,
         "--use-fp8",
         "--cpu-offload",
         "--infer-min"
@@ -44,21 +60,51 @@ def run_sample_gpu_poor(fp8_ckpt_path):
 
     env = os.environ.copy()
     env["PYTHONPATH"] = "./"
+    env["MODEL_BASE"] = WEIGHTS_DIR
+    env["CPU_OFFLOAD"] = "1"
     env["CUDA_VISIBLE_DEVICES"] = "0"
 
     proc = subprocess.run(cmd, env=env)
     if proc.returncode != 0:
         print("❌ sample_gpu_poor.py failed.")
-        exit(1)
+        sys.exit(1)
+
     print("✅ sample_gpu_poor.py completed successfully.")
 
+def run_flask_audio(checkpoint_path):
+    print("🚀 Starting flask_audio.py...")
+    cmd = [
+        "torchrun",
+        "--nnodes=1",
+        "--nproc_per_node=1",
+        "--master_port=29605",
+        "hymm_gradio/flask_audio.py",
+        "--input", "assets/test.csv",
+        "--ckpt", checkpoint_path,
+        "--sample-n-frames", "129",
+        "--seed", "128",
+        "--image-size", "704",
+        "--cfg-scale", "7.5",
+        "--infer-steps", "50",
+        "--use-deepcache", "1",
+        "--flow-shift-eval-video", "5.0"
+    ]
+    subprocess.Popen(cmd)
+
 def run_gradio_ui():
-    print("🟢 Launching Gradio interface...")
-    subprocess.Popen(["python3", "hymm_gradio/gradio_audio.py"])
+    print("🟢 Starting gradio_audio.py UI...")
+    cmd = ["python3", "hymm_gradio/gradio_audio.py"]
+    subprocess.Popen(cmd)
 
 def main():
-    fp8_ckpt, _ = download_checkpoints()
-    run_sample_gpu_poor(fp8_ckpt)
+    # Step 1: Download only needed files from Hugging Face repo
+    checkpoint, checkpoint_fp8 = download_checkpoints()
+
+    # Step 2: Run poor sample video generation
+    run_sample_gpu_poor(checkpoint_fp8)
+
+    # Step 3: Launch Flask + Gradio UIs
+    run_flask_audio(checkpoint)
     time.sleep(5)
     run_gradio_ui()