harry900000 committed on
Commit e22a639 · 1 Parent(s): 67ea722

download checkpoints first

Files changed (4)
  1. app.py +16 -41
  2. checkpoints.py +66 -0
  3. download_checkpoints.py +15 -15
  4. test_environment.py +26 -0
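
Taken together, the change reorders the Space's startup: checkpoints are downloaded into /data/checkpoints first, and the apt/pip environment setup that used to run at the top of app.py moves into test_environment.setup_environment(). A minimal sketch of the resulting startup order, using only names that appear in the diffs below (error handling omitted):

```python
# Sketch of app.py's startup order after this commit (names taken from the diffs below).
import os

CHECKPOINTS_PATH = "/data/checkpoints"  # persistent storage instead of a path next to app.py

from download_checkpoints import main as download_checkpoints

os.makedirs(CHECKPOINTS_PATH, exist_ok=True)
download_checkpoints(hf_token="", output_dir=CHECKPOINTS_PATH, model="7b_av")  # 1. weights first

from test_environment import main as check_environment
from test_environment import setup_environment

setup_environment()    # 2. apt + pip installs moved out of app.py
check_environment()    # 3. sanity-check the environment before the app starts
```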
app.py CHANGED
@@ -5,47 +5,8 @@ import gradio as gr
 import spaces
 
 PWD = os.path.dirname(__file__)
-# CHECKPOINTS_PATH = "/data/checkpoints"
-CHECKPOINTS_PATH = os.path.join(PWD, "checkpoints")
-
-# import subprocess
-
-# copy cudnn files
-os.system("cp /root/.pyenv/versions/3.10.18/lib/python3.10/site-packages/nvidia/cudnn/include/*.h /usr/local/cuda/include")
-os.system("cp /root/.pyenv/versions/3.10.18/lib/python3.10/site-packages/nvidia/cudnn/lib/*.so* /usr/local/cuda/lib64")
-
-os.system("apt-get update && apt-get install -qqy libmagickwand-dev")
-
-# install packages
-# os.system('export FLASH_ATTENTION_SKIP_CUDA_BUILD=FALSE && pip install --timeout=1000000000 --no-build-isolation "flash-attn<=2.7.4.post1"')
-os.system(
-    "pip install --timeout=1000000000 https://download.pytorch.org/whl/cu128/flashinfer/flashinfer_python-0.2.5%2Bcu128torch2.7-cp38-abi3-linux_x86_64.whl"
-)
-os.system('export VLLM_ATTENTION_BACKEND=FLASHINFER && pip install "vllm==0.9.0"')
-os.system('pip install "decord==0.6.0"')
-
-os.system(
-    "export CONDA_PREFIX=/usr/local/cuda && ln -sf $CONDA_PREFIX/lib/python3.10/site-packages/nvidia/*/include/* $CONDA_PREFIX/include/"
-)
-os.system(
-    "export CONDA_PREFIX=/usr/local/cuda && ln -sf $CONDA_PREFIX/lib/python3.10/site-packages/nvidia/*/include/* $CONDA_PREFIX/include/python3.10"
-)
-
-os.system('pip install --timeout=1000000000 --no-build-isolation "transformer-engine[pytorch]"')
-os.system('pip install --timeout=1000000000 "decord==0.6.0"')
-
-os.system(
-    'pip install --timeout=1000000000 "git+https://github.com/nvidia-cosmos/cosmos-transfer1@e4055e39ee9c53165e85275bdab84ed20909714a"'
-)
-
-# setup env
-os.environ["CUDA_HOME"] = "/usr/local/cuda"
-os.environ["LD_LIBRARY_PATH"] = "$CUDA_HOME/lib:$CUDA_HOME/lib64:$LD_LIBRARY_PATH"
-os.environ["PATH"] = "$CUDA_HOME/bin:/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:$PATH"
-
-from test_environment import main as check_environment
-
-check_environment()
+CHECKPOINTS_PATH = "/data/checkpoints"
+# CHECKPOINTS_PATH = os.path.join(PWD, "checkpoints")
 
 try:
     import os
@@ -68,12 +29,26 @@ from download_checkpoints import main as download_checkpoints
 os.makedirs(CHECKPOINTS_PATH, exist_ok=True)
 download_checkpoints(hf_token="", output_dir=CHECKPOINTS_PATH, model="7b_av")
 
+
+from test_environment import main as check_environment
+from test_environment import setup_environment
+
+setup_environment()
+
+# setup env
+os.environ["CUDA_HOME"] = "/usr/local/cuda"
+os.environ["LD_LIBRARY_PATH"] = "$CUDA_HOME/lib:$CUDA_HOME/lib64:$LD_LIBRARY_PATH"
+os.environ["PATH"] = "$CUDA_HOME/bin:/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:$PATH"
+
+check_environment()
+
 os.environ["TOKENIZERS_PARALLELISM"] = "false"  # Workaround to suppress MP warning
 
 import copy
 import json
 import random
 from io import BytesIO
+
 import torch
 from cosmos_transfer1.checkpoints import (
     BASE_7B_CHECKPOINT_AV_SAMPLE_PATH,
checkpoints.py ADDED
@@ -0,0 +1,66 @@
+# SPDX-FileCopyrightText: Copyright (c) 2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: Apache-2.0
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Cosmos Transfer1 Model Checkpoints
+COSMOS_TRANSFER1_7B_CHECKPOINT = "nvidia/Cosmos-Transfer1-7B"
+COSMOS_TRANSFER1_7B_SAMPLE_AV_CHECKPOINT = "nvidia/Cosmos-Transfer1-7B-Sample-AV"
+COSMOS_TOKENIZER_CHECKPOINT = "nvidia/Cosmos-Tokenize1-CV8x8x8-720p"
+COSMOS_UPSAMPLER_CHECKPOINT = "nvidia/Cosmos-UpsamplePrompt1-12B-Transfer"
+COSMOS_GUARDRAIL_CHECKPOINT = "nvidia/Cosmos-Guardrail1"
+
+# 3rd Party Model Checkpoints
+SAM2_MODEL_CHECKPOINT = "facebook/sam2-hiera-large"
+DEPTH_ANYTHING_MODEL_CHECKPOINT = "depth-anything/Depth-Anything-V2-Small-hf"
+GROUNDING_DINO_MODEL_CHECKPOINT = "IDEA-Research/grounding-dino-tiny"
+T5_MODEL_CHECKPOINT = "google-t5/t5-11b"
+LLAMA_GUARD_3_MODEL_CHECKPOINT = "meta-llama/Llama-Guard-3-8B"
+
+# Internal Checkpoint Paths, please append _PATH to the end of the variable
+BASE_7B_CHECKPOINT_PATH = f"{COSMOS_TRANSFER1_7B_CHECKPOINT}/base_model.pt"
+VIS2WORLD_CONTROLNET_7B_CHECKPOINT_PATH = f"{COSMOS_TRANSFER1_7B_CHECKPOINT}/vis_control.pt"
+EDGE2WORLD_CONTROLNET_7B_CHECKPOINT_PATH = f"{COSMOS_TRANSFER1_7B_CHECKPOINT}/edge_control.pt"
+SEG2WORLD_CONTROLNET_7B_CHECKPOINT_PATH = f"{COSMOS_TRANSFER1_7B_CHECKPOINT}/seg_control.pt"
+DEPTH2WORLD_CONTROLNET_7B_CHECKPOINT_PATH = f"{COSMOS_TRANSFER1_7B_CHECKPOINT}/depth_control.pt"
+KEYPOINT2WORLD_CONTROLNET_7B_CHECKPOINT_PATH = f"{COSMOS_TRANSFER1_7B_CHECKPOINT}/keypoint_control.pt"
+UPSCALER_CONTROLNET_7B_CHECKPOINT_PATH = f"{COSMOS_TRANSFER1_7B_CHECKPOINT}/4kupscaler_control.pt"
+BASE_7B_CHECKPOINT_AV_SAMPLE_PATH = f"{COSMOS_TRANSFER1_7B_SAMPLE_AV_CHECKPOINT}/base_model.pt"
+HDMAP2WORLD_CONTROLNET_7B_CHECKPOINT_PATH = f"{COSMOS_TRANSFER1_7B_SAMPLE_AV_CHECKPOINT}/hdmap_control.pt"
+LIDAR2WORLD_CONTROLNET_7B_CHECKPOINT_PATH = f"{COSMOS_TRANSFER1_7B_SAMPLE_AV_CHECKPOINT}/lidar_control.pt"
+
+# Transfer1-7B-SV2MV-Sample-AV checkpoints
+
+COSMOS_TRANSFER1_7B_MV_SAMPLE_AV_CHECKPOINT = "nvidia/Cosmos-Transfer1-7B-Sample-AV-Single2MultiView"
+BASE_t2w_7B_SV2MV_CHECKPOINT_AV_SAMPLE_PATH = f"{COSMOS_TRANSFER1_7B_MV_SAMPLE_AV_CHECKPOINT}/t2w_base_model.pt"
+BASE_v2w_7B_SV2MV_CHECKPOINT_AV_SAMPLE_PATH = f"{COSMOS_TRANSFER1_7B_MV_SAMPLE_AV_CHECKPOINT}/v2w_base_model.pt"
+SV2MV_t2w_HDMAP2WORLD_CONTROLNET_7B_CHECKPOINT_PATH = (
+    f"{COSMOS_TRANSFER1_7B_MV_SAMPLE_AV_CHECKPOINT}/t2w_hdmap_control.pt"
+)
+SV2MV_t2w_LIDAR2WORLD_CONTROLNET_7B_CHECKPOINT_PATH = (
+    f"{COSMOS_TRANSFER1_7B_MV_SAMPLE_AV_CHECKPOINT}/t2w_lidar_control.pt"
+)
+SV2MV_v2w_HDMAP2WORLD_CONTROLNET_7B_CHECKPOINT_PATH = (
+    f"{COSMOS_TRANSFER1_7B_MV_SAMPLE_AV_CHECKPOINT}/v2w_hdmap_control.pt"
+)
+SV2MV_v2w_LIDAR2WORLD_CONTROLNET_7B_CHECKPOINT_PATH = (
+    f"{COSMOS_TRANSFER1_7B_MV_SAMPLE_AV_CHECKPOINT}/v2w_lidar_control.pt"
+)
+
+SV2MV_t2w_HDMAP2WORLD_CONTROLNET_7B_WAYMO_CHECKPOINT_PATH = (
+    f"{COSMOS_TRANSFER1_7B_MV_SAMPLE_AV_CHECKPOINT}/t2w_hdmap_control_waymo.pt"
+)
+SV2MV_v2w_HDMAP2WORLD_CONTROLNET_7B_WAYMO_CHECKPOINT_PATH = (
+    f"{COSMOS_TRANSFER1_7B_MV_SAMPLE_AV_CHECKPOINT}/v2w_hdmap_control_waymo.pt"
+)
+EDGE2WORLD_CONTROLNET_DISTILLED_CHECKPOINT_PATH = f"{COSMOS_TRANSFER1_7B_CHECKPOINT}/edge_control_distilled.pt"
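
The download script below imports this module and accumulates its values into checkpoint_vars; the commit does not show that collection logic in full, so the snippet here is only a rough sketch of one way the Hugging Face repo IDs could be pulled out of these constants:

```python
# Rough sketch (not part of the commit): list the unique Hugging Face repos
# referenced by the constants in checkpoints.py.
import checkpoints

repo_ids = sorted(
    {
        "/".join(value.split("/")[:2])  # keep "org/name", drop any trailing checkpoint filename
        for name, value in vars(checkpoints).items()
        if isinstance(value, str) and not name.startswith("__") and "/" in value
    }
)
print(repo_ids)
```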
download_checkpoints.py CHANGED
@@ -1,11 +1,11 @@
 import hashlib
+import logging
 import os
 import pathlib
 from typing import Literal
 
 # Import the checkpoint paths
-from cosmos_transfer1 import checkpoints
-from cosmos_transfer1.utils import log
+import checkpoints
 from huggingface_hub import login, snapshot_download
 
 
@@ -16,7 +16,7 @@ def download_checkpoint(checkpoint: str, output_dir: str) -> None:
     checkpoint, revision = checkpoint.split(":") if ":" in checkpoint else (checkpoint, None)
     checkpoint_dir = os.path.join(output_dir, checkpoint)
     if get_md5_checksum(output_dir, checkpoint):
-        log.warning(f"Checkpoint {checkpoint_dir} EXISTS, skipping download... ")
+        logging.warning(f"Checkpoint {checkpoint_dir} EXISTS, skipping download... ")
         return
     else:
         print(f"Downloading {checkpoint} to {checkpoint_dir}")
@@ -113,20 +113,20 @@ def main(hf_token: str = os.environ.get("HF_TOKEN"), output_dir: str = "./checkp
         else:
             checkpoint_vars.append(obj)
 
+    # checkpoint_vars = [
+    #     "nvidia/Cosmos-Guardrail1",
+    #     "nvidia/Cosmos-Tokenize1-CV8x8x8-720p",
+    #     "nvidia/Cosmos-Transfer1-7B-Sample-AV-Single2MultiView",
+    #     "nvidia/Cosmos-Transfer1-7B-Sample-AV",
+    #     "nvidia/Cosmos-UpsamplePrompt1-12B-Transfer",
+    #     "depth-anything/Depth-Anything-V2-Small-hf",
+    #     "IDEA-Research/grounding-dino-tiny",
+    #     "meta-llama/Llama-Guard-3-8B",
+    #     "facebook/sam2-hiera-large",
+    #     "google-t5/t5-11b",
+    # ]
     print(f"Found {len(checkpoint_vars)} checkpoints to download")
     print(checkpoint_vars)
-    checkpoint_vars = [
-        # "nvidia/Cosmos-Guardrail1",
-        # "nvidia/Cosmos-Tokenize1-CV8x8x8-720p",
-        # "nvidia/Cosmos-Transfer1-7B-Sample-AV-Single2MultiView",
-        # "nvidia/Cosmos-Transfer1-7B-Sample-AV",
-        # "nvidia/Cosmos-UpsamplePrompt1-12B-Transfer",
-        # "depth-anything/Depth-Anything-V2-Small-hf",
-        # "IDEA-Research/grounding-dino-tiny",
-        # "meta-llama/Llama-Guard-3-8B",
-        # "facebook/sam2-hiera-large",
-        # "google-t5/t5-11b",
-    ]
 
     # Download each checkpoint
     for checkpoint in checkpoint_vars:
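
For reference, app.py drives this script through its main() entry point. A minimal call mirroring the invocation in the app.py diff above (an empty hf_token defers to whatever HF authentication the environment already provides):

```python
# Mirrors the invocation in app.py after this commit.
from download_checkpoints import main as download_checkpoints

download_checkpoints(hf_token="", output_dir="/data/checkpoints", model="7b_av")
```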
test_environment.py CHANGED
@@ -19,6 +19,32 @@ import os
 import sys
 
 
+def setup_environment():
+    os.system("apt-get update && apt-get install -qqy libmagickwand-dev")
+
+    # install packages
+    # os.system('export FLASH_ATTENTION_SKIP_CUDA_BUILD=FALSE && pip install --timeout=1000000000 --no-build-isolation "flash-attn<=2.7.4.post1"')
+    os.system(
+        "pip install --timeout=1000000000 https://download.pytorch.org/whl/cu128/flashinfer/flashinfer_python-0.2.5%2Bcu128torch2.7-cp38-abi3-linux_x86_64.whl"
+    )
+    os.system('export VLLM_ATTENTION_BACKEND=FLASHINFER && pip install "vllm==0.9.0"')
+    os.system('pip install "decord==0.6.0"')
+
+    os.system(
+        "export CONDA_PREFIX=/usr/local/cuda && ln -sf $CONDA_PREFIX/lib/python3.10/site-packages/nvidia/*/include/* $CONDA_PREFIX/include/"
+    )
+    os.system(
+        "export CONDA_PREFIX=/usr/local/cuda && ln -sf $CONDA_PREFIX/lib/python3.10/site-packages/nvidia/*/include/* $CONDA_PREFIX/include/python3.10"
+    )
+
+    os.system('pip install --timeout=1000000000 --no-build-isolation "transformer-engine[pytorch]"')
+    os.system('pip install --timeout=1000000000 "decord==0.6.0"')
+
+    os.system(
+        'pip install --timeout=1000000000 "git+https://github.com/nvidia-cosmos/cosmos-transfer1@e4055e39ee9c53165e85275bdab84ed20909714a"'
+    )
+
+
 def parse_args():
     parser = argparse.ArgumentParser()
     parser.add_argument(
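
One caveat when reading setup_environment(): os.system() only returns an exit status, so a failed install step is easy to miss. If stricter behaviour were ever wanted, a small wrapper along the lines of the hypothetical run() below could replace the bare os.system calls; it is not part of this commit:

```python
# Hypothetical alternative (not in this commit): fail fast when a setup command errors.
import subprocess


def run(cmd: str) -> None:
    # shell=True keeps the existing `export ... && pip install ...` strings working unchanged;
    # check=True raises CalledProcessError on a non-zero exit instead of ignoring it.
    subprocess.run(cmd, shell=True, check=True)


run("apt-get update && apt-get install -qqy libmagickwand-dev")
```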