da03 committed on
Commit
b189664
·
1 Parent(s): f0e8a7a
Files changed (1) hide show
  1. utils.py +3 -3
utils.py CHANGED
@@ -86,10 +86,10 @@ def sample_frame(model: LatentDiffusion, prompt: str, image_sequence: torch.Tens
86
  print ('samples_ddim.shape', samples_ddim.shape)
87
  x_samples_ddim = samples_ddim[:, :3]
88
  # upsample to 512 x 384
89
- #x_samples_ddim = torch.nn.functional.interpolate(x_samples_ddim, size=(384, 512), mode='bilinear')
90
  # create a 512 x 384 image and paste the samples_ddim into the center
91
- x_samples_ddim = torch.zeros((1, 3, 384, 512))
92
- x_samples_ddim[:, :, 128:128+48, 160:160+64] = samples_ddim[:, :3]
93
  else:
94
  x_samples_ddim = model.decode_first_stage(samples_ddim)
95
  #x_samples_ddim = pos_map.to(c['c_concat'].device).unsqueeze(0).expand(-1, 3, -1, -1)
 
86
  print ('samples_ddim.shape', samples_ddim.shape)
87
  x_samples_ddim = samples_ddim[:, :3]
88
  # upsample to 512 x 384
89
+ x_samples_ddim = torch.nn.functional.interpolate(x_samples_ddim, size=(384, 512), mode='bilinear')
90
  # create a 512 x 384 image and paste the samples_ddim into the center
91
+ #x_samples_ddim = torch.zeros((1, 3, 384, 512))
92
+ #x_samples_ddim[:, :, 128:128+48, 160:160+64] = samples_ddim[:, :3]
93
  else:
94
  x_samples_ddim = model.decode_first_stage(samples_ddim)
95
  #x_samples_ddim = pos_map.to(c['c_concat'].device).unsqueeze(0).expand(-1, 3, -1, -1)