da03 committed on
Commit b21a8cb · 1 Parent(s): 414c4ae
Files changed (1)
  1. main.py +3 -3
main.py CHANGED
@@ -23,12 +23,12 @@ device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
 
 
 DEBUG_MODE = False
-DEBUG_MODE_2 = False
+DEBUG_MODE_2 = True
 NUM_MAX_FRAMES = 2
 
 SCREEN_WIDTH = 512
 SCREEN_HEIGHT = 384
-NUM_SAMPLING_STEPS = 1000
+NUM_SAMPLING_STEPS = 32
 
 with open('latent_stats.json', 'r') as f:
     latent_stats = json.load(f)
@@ -163,7 +163,7 @@ def _process_frame_sync(model, inputs):
     if use_rnn:
         sample_latent = output_from_rnn[:, :16]
     else:
-        NUM_SAMPLING_STEPS = 8
+        #NUM_SAMPLING_STEPS = 8
         if NUM_SAMPLING_STEPS >= 1000:
            sample_latent = model.p_sample_loop(cond={'c_concat': output_from_rnn}, shape=[1, *LATENT_DIMS], return_intermediates=False, verbose=True)
         else:
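
Net effect of the three edits (DEBUG_MODE_2 switched on, module-level NUM_SAMPLING_STEPS lowered from 1000 to 32, and the local NUM_SAMPLING_STEPS = 8 override commented out): the NUM_SAMPLING_STEPS >= 1000 check in _process_frame_sync is now false, so generation no longer runs the full p_sample_loop and instead falls through to the else branch, which the diff does not show. Below is a minimal sketch of that dispatch, assuming the else branch calls some faster, truncated sampler; fast_sample is a hypothetical stand-in for that code.

# Sketch only: shows which sampling branch runs after this commit.
# `fast_sample` is a hypothetical placeholder for the else branch in main.py,
# which is not part of this diff.

NUM_SAMPLING_STEPS = 32  # module-level value after this commit (was 1000)

def fast_sample(model, cond, steps):
    # Assumed faster/truncated sampler; the real call is not shown in the diff.
    raise NotImplementedError

def sample_latent_for(model, output_from_rnn, LATENT_DIMS):
    if NUM_SAMPLING_STEPS >= 1000:
        # Branch shown in the diff: full ancestral sampling loop.
        return model.p_sample_loop(
            cond={'c_concat': output_from_rnn},
            shape=[1, *LATENT_DIMS],
            return_intermediates=False,
            verbose=True,
        )
    # With NUM_SAMPLING_STEPS = 32 (and the = 8 override commented out),
    # execution now reaches this faster path instead.
    return fast_sample(model, output_from_rnn, NUM_SAMPLING_STEPS)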