rahul7star committed on
Commit
035f115
·
verified ·
1 Parent(s): 41394ac

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +44 -86
app.py CHANGED
@@ -2,63 +2,57 @@ import os
2
  import sys
3
  import subprocess
4
  import time
5
- import shutil
6
- import psutil
7
  from huggingface_hub import snapshot_download
8
 
 
 
 
9
  MODEL_REPO = "tencent/HunyuanVideo-Avatar"
10
- BASE_DIR = os.getcwd()
11
- WEIGHTS_DIR = os.path.join(BASE_DIR, "weights")
12
- OUTPUT_BASEPATH = os.path.join(BASE_DIR, "results-poor")
13
- CUSTOM_CACHE_DIR = "/data/hf_cache" # βœ… change if different
14
- HF_HOME_ENV = {"HF_HOME": CUSTOM_CACHE_DIR}
15
 
16
- # Paths to required checkpoints
17
- CHECKPOINT_FILE = os.path.join(WEIGHTS_DIR, "ckpts", "hunyuan-video-t2v-720p", "transformers", "mp_rank_00_model_states.pt")
18
- CHECKPOINT_FP8_FILE = os.path.join(WEIGHTS_DIR, "ckpts", "hunyuan-video-t2v-720p", "transformers", "mp_rank_00_model_states_fp8.pt")
19
 
20
- def check_disk_space(min_free_gb=10):
21
- total, used, free = shutil.disk_usage("/")
22
- free_gb = free // (2**30)
23
- print(f"πŸ’Ύ Disk space: {free_gb}GB free")
24
- if free_gb < min_free_gb:
25
- print(f"❌ Not enough disk space. {free_gb}GB available, {min_free_gb}GB required.")
26
- sys.exit(1)
27
-
28
- def clear_hf_cache():
29
- hf_cache = os.path.expanduser("~/.cache/huggingface")
30
- if os.path.exists(hf_cache):
31
- print("🧹 Cleaning Hugging Face cache...")
32
- shutil.rmtree(hf_cache)
33
 
 
 
 
34
  def download_model():
35
- print("⬇️ Downloading model to weights directory...")
36
- os.makedirs(WEIGHTS_DIR, exist_ok=True)
 
37
 
 
38
  snapshot_download(
39
  repo_id=MODEL_REPO,
40
- local_dir=WEIGHTS_DIR,
41
- local_dir_use_symlinks=False,
42
- **HF_HOME_ENV
43
  )
44
 
45
- if not os.path.isfile(CHECKPOINT_FILE):
46
- print(f"❌ Checkpoint missing at {CHECKPOINT_FILE}")
47
  sys.exit(1)
48
- if not os.path.isfile(CHECKPOINT_FP8_FILE):
49
- print(f"❌ FP8 Checkpoint missing at {CHECKPOINT_FP8_FILE}")
 
50
  sys.exit(1)
51
 
52
- print("βœ… Model downloaded successfully.")
53
- clear_hf_cache()
54
 
 
 
 
55
  def run_sample_gpu_poor():
56
  print("🎬 Running sample_gpu_poor.py...")
57
-
58
  cmd = [
59
  "python3", "hymm_sp/sample_gpu_poor.py",
60
- "--input", "assets/test.csv",
61
- "--ckpt", CHECKPOINT_FP8_FILE,
62
  "--sample-n-frames", "129",
63
  "--seed", "128",
64
  "--image-size", "704",
@@ -66,74 +60,38 @@ def run_sample_gpu_poor():
66
  "--infer-steps", "50",
67
  "--use-deepcache", "1",
68
  "--flow-shift-eval-video", "5.0",
69
- "--save-path", OUTPUT_BASEPATH,
70
  "--use-fp8",
71
  "--cpu-offload",
72
  "--infer-min"
73
  ]
74
 
75
  env = os.environ.copy()
76
- env.update(HF_HOME_ENV)
77
  env["PYTHONPATH"] = "./"
78
- env["MODEL_BASE"] = WEIGHTS_DIR
79
  env["CPU_OFFLOAD"] = "1"
80
  env["CUDA_VISIBLE_DEVICES"] = "0"
81
 
82
- result = subprocess.run(cmd, env=env)
83
- if result.returncode != 0:
84
  print("❌ sample_gpu_poor.py failed.")
85
  sys.exit(1)
 
86
 
87
- print("βœ… sample_gpu_poor.py completed.")
88
-
89
- def run_flask_audio():
90
- print("πŸš€ Starting flask_audio.py...")
91
- cmd = [
92
- "torchrun",
93
- "--nnodes=1",
94
- "--nproc_per_node=8",
95
- "--master_port=29605",
96
- "hymm_gradio/flask_audio.py",
97
- "--input", "assets/test.csv",
98
- "--ckpt", CHECKPOINT_FILE,
99
- "--sample-n-frames", "129",
100
- "--seed", "128",
101
- "--image-size", "704",
102
- "--cfg-scale", "7.5",
103
- "--infer-steps", "50",
104
- "--use-deepcache", "1",
105
- "--flow-shift-eval-video", "5.0"
106
- ]
107
- subprocess.Popen(cmd)
108
-
109
  def run_gradio_ui():
110
- print("🟒 Launching Gradio UI...")
111
  cmd = ["python3", "hymm_gradio/gradio_audio.py"]
112
  subprocess.Popen(cmd)
113
 
114
- def create_gitignore():
115
- gitignore_path = os.path.join(BASE_DIR, ".gitignore")
116
- lines_to_add = ["weights/", "results-*/", ".cache/"]
117
- with open(gitignore_path, "a+") as f:
118
- f.seek(0)
119
- existing = f.read().splitlines()
120
- for line in lines_to_add:
121
- if line not in existing:
122
- f.write(f"{line}\n")
123
-
124
  def main():
125
- create_gitignore()
126
- check_disk_space(min_free_gb=10)
127
-
128
- if os.path.isfile(CHECKPOINT_FILE) and os.path.isfile(CHECKPOINT_FP8_FILE):
129
- print("βœ… Checkpoints exist. Skipping download.")
130
- else:
131
- download_model()
132
-
133
  run_sample_gpu_poor()
134
-
135
- # Optional: Launch UIs
136
- run_flask_audio()
137
  time.sleep(5)
138
  run_gradio_ui()
139
 
 
2
  import sys
3
  import subprocess
4
  import time
5
+ from pathlib import Path
 
6
  from huggingface_hub import snapshot_download
7
 
8
+ # --------------------
9
+ # CONFIGURATION
10
+ # --------------------
11
  MODEL_REPO = "tencent/HunyuanVideo-Avatar"
12
+ HF_CACHE_DIR = Path("/home/user/.cache/huggingface/hf_cache/hunyuan_avatar")
13
+ HF_CACHE_DIR.mkdir(parents=True, exist_ok=True)
 
 
 
14
 
15
+ CHECKPOINT_FILE = HF_CACHE_DIR / "ckpts/hunyuan-video-t2v-720p/transformers/mp_rank_00_model_states.pt"
16
+ CHECKPOINT_FP8_FILE = HF_CACHE_DIR / "ckpts/hunyuan-video-t2v-720p/transformers/mp_rank_00_model_states_fp8.pt"
 
17
 
18
+ ASSETS_CSV = "assets/test.csv"
19
+ OUTPUT_DIR = Path("results-poor")
20
+ OUTPUT_DIR.mkdir(exist_ok=True)
 
 
 
 
 
 
 
 
 
 
21
 
22
+ # --------------------
23
+ # Download the model (if needed)
24
+ # --------------------
25
  def download_model():
26
+ if CHECKPOINT_FILE.exists() and CHECKPOINT_FP8_FILE.exists():
27
+ print("βœ… Model checkpoint already exists. Skipping download.")
28
+ return
29
 
30
+ print("⬇️ Downloading model into HF Space cache...")
31
  snapshot_download(
32
  repo_id=MODEL_REPO,
33
+ local_dir=HF_CACHE_DIR,
34
+ local_dir_use_symlinks=False
 
35
  )
36
 
37
+ if not CHECKPOINT_FILE.exists():
38
+ print(f"❌ Missing checkpoint: {CHECKPOINT_FILE}")
39
  sys.exit(1)
40
+
41
+ if not CHECKPOINT_FP8_FILE.exists():
42
+ print(f"❌ Missing FP8 checkpoint: {CHECKPOINT_FP8_FILE}")
43
  sys.exit(1)
44
 
45
+ print("βœ… Model download complete.")
 
46
 
47
+ # --------------------
48
+ # Run sample_gpu_poor.py
49
+ # --------------------
50
  def run_sample_gpu_poor():
51
  print("🎬 Running sample_gpu_poor.py...")
 
52
  cmd = [
53
  "python3", "hymm_sp/sample_gpu_poor.py",
54
+ "--input", ASSETS_CSV,
55
+ "--ckpt", str(CHECKPOINT_FP8_FILE),
56
  "--sample-n-frames", "129",
57
  "--seed", "128",
58
  "--image-size", "704",
 
60
  "--infer-steps", "50",
61
  "--use-deepcache", "1",
62
  "--flow-shift-eval-video", "5.0",
63
+ "--save-path", str(OUTPUT_DIR),
64
  "--use-fp8",
65
  "--cpu-offload",
66
  "--infer-min"
67
  ]
68
 
69
  env = os.environ.copy()
 
70
  env["PYTHONPATH"] = "./"
71
+ env["MODEL_BASE"] = str(HF_CACHE_DIR)
72
  env["CPU_OFFLOAD"] = "1"
73
  env["CUDA_VISIBLE_DEVICES"] = "0"
74
 
75
+ proc = subprocess.run(cmd, env=env)
76
+ if proc.returncode != 0:
77
  print("❌ sample_gpu_poor.py failed.")
78
  sys.exit(1)
79
+ print("βœ… sample_gpu_poor.py completed successfully.")
80
 
81
+ # --------------------
82
+ # Optional: Start UI
83
+ # --------------------
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
84
  def run_gradio_ui():
85
+ print("🟒 Launching Gradio interface...")
86
  cmd = ["python3", "hymm_gradio/gradio_audio.py"]
87
  subprocess.Popen(cmd)
88
 
89
+ # --------------------
90
+ # Entry point
91
+ # --------------------
 
 
 
 
 
 
 
92
  def main():
93
+ download_model()
 
 
 
 
 
 
 
94
  run_sample_gpu_poor()
 
 
 
95
  time.sleep(5)
96
  run_gradio_ui()
97