tsi-org committed
Commit 99cb15b · verified · 1 Parent(s): b1c2087

Update app.py

Files changed (1)
  app.py +4 -1
app.py CHANGED
@@ -347,8 +347,11 @@ def video_generation_handler_streaming(prompt, seed=42, fps=15, save_frames=True
     rnd = torch.Generator(gpu).manual_seed(int(seed))
     pipeline._initialize_kv_cache(1, torch.float16, device=gpu)
     pipeline._initialize_crossattn_cache(1, torch.float16, device=gpu)
-    noise = torch.randn([1, 21, 16, 60, 104], device=gpu, dtype=torch.float16, generator=rnd)
 
+    # Calculate total frames needed for your desired number of blocks
+    total_frames = num_blocks * pipeline.num_frame_per_block
+    noise = torch.randn([1, total_frames, 16, 60, 104], device=gpu, dtype=torch.float16, generator=rnd)
+
     vae_cache, latents_cache = None, None
     if not APP_STATE["current_use_taehv"] and not args.trt:
         vae_cache = [c.to(device=gpu, dtype=torch.float16) for c in ZERO_VAE_CACHE]
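For reference, a minimal sketch of the shape arithmetic behind this change. The values num_frame_per_block = 3 and num_blocks = 7 are assumptions chosen only so their product reproduces the 21 latent frames that the old code hard-coded; the real values come from the pipeline and the handler, and the sketch runs on CPU in float32 so it works without a GPU.

import torch

# Assumed values (hypothetical): stand-ins for pipeline.num_frame_per_block
# and the handler's requested block count.
num_frame_per_block = 3
num_blocks = 7

# Same calculation as the added line in the diff: 21 with the assumed values.
total_frames = num_blocks * num_frame_per_block

# Same noise shape as the diff, but on CPU in float32 for portability.
rnd = torch.Generator("cpu").manual_seed(42)
noise = torch.randn([1, total_frames, 16, 60, 104], generator=rnd)
print(noise.shape)  # torch.Size([1, 21, 16, 60, 104])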