Spaces:
Running
Running
Update generate.py
Browse files- generate.py +3 -3
generate.py
CHANGED
@@ -41,11 +41,11 @@ if torch.cuda.is_available():
    torch.backends.cudnn.benchmark = False
    torch.backends.cudnn.deterministic = True

-   #
-   # Pass offload_model correctly as either True or False
+   # Command to run the model generation
    offload_model_value = "True" if args.offload_model else "False"
-   command = f"python generate.py --task …  [old line truncated in page capture]
+   command = f"python generate.py --task t2v-1.3B --size 832*480 --ckpt_dir ./Wan2.1-T2V-1.3B --offload_model {offload_model_value} --t5_cpu --sample_shift 8 --sample_guide_scale 6 --prompt \"{args.prompt}\""

+   # Run the model
    process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = process.communicate()