rahul7star committed (verified)
Commit 3196e8f · 1 Parent(s): 77adc33

Update generate.py

Files changed (1): generate.py (+3 −3)
generate.py CHANGED
@@ -41,11 +41,11 @@ if torch.cuda.is_available():
 torch.backends.cudnn.benchmark = False
 torch.backends.cudnn.deterministic = True
 
-# Run the model (Ensure that `generate.py` includes these new params in its model call)
-# Pass offload_model correctly as either True or False
+# Command to run the model generation
 offload_model_value = "True" if args.offload_model else "False"
-command = f"python generate.py --task {args.task} --size {args.size} --frame_num {args.frame_num} --sample_steps {args.sample_steps} --ckpt_dir {args.ckpt_dir} --offload_model {offload_model_value} --t5_cpu {args.t5_cpu} --sample_shift {args.sample_shift} --sample_guide_scale {args.sample_guide_scale} --prompt \"{args.prompt}\""
+command = f"python generate.py --task t2v-1.3B --size 832*480 --ckpt_dir ./Wan2.1-T2V-1.3B --offload_model {offload_model_value} --t5_cpu --sample_shift 8 --sample_guide_scale 6 --prompt \"{args.prompt}\""
 
+# Run the model
 process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
 stdout, stderr = process.communicate()
 
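
For reference, the new hard-coded invocation can also be built as an argument list and run without shell=True, which avoids the quote-escaping around the prompt. This is a minimal sketch, not part of the commit: the argparse stand-in below is hypothetical, and only the flag values (t2v-1.3B, 832*480, ./Wan2.1-T2V-1.3B, sample_shift 8, sample_guide_scale 6, bare --t5_cpu) are taken from the diff above.

import argparse
import subprocess

# Hypothetical stand-in for the wrapper script's argparse namespace;
# the real script defines more options than shown here.
parser = argparse.ArgumentParser()
parser.add_argument("--offload_model", action="store_true")
parser.add_argument("--prompt", default="example prompt")
args = parser.parse_args()

# Same flag values as the new command in the diff above.
offload_model_value = "True" if args.offload_model else "False"
cmd = [
    "python", "generate.py",
    "--task", "t2v-1.3B",
    "--size", "832*480",
    "--ckpt_dir", "./Wan2.1-T2V-1.3B",
    "--offload_model", offload_model_value,
    "--t5_cpu",
    "--sample_shift", "8",
    "--sample_guide_scale", "6",
    "--prompt", args.prompt,
]

# Run generate.py and collect its output, mirroring the Popen/communicate pair above.
result = subprocess.run(cmd, capture_output=True, text=True)
stdout, stderr = result.stdout, result.stderr

Passing the prompt as its own list element needs no shell quoting, whereas the f-string/shell=True version breaks if the prompt itself contains a double quote.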