lionelgarnier committed on
Commit 234c60d · 1 Parent(s): ca35e0f

change max token

Files changed (1): app.py (+1, -1)
app.py CHANGED
@@ -126,7 +126,7 @@ def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, num_in
         num_inference_steps=num_inference_steps,
         generator=generator,
         guidance_scale=7.5,  # Increased guidance scale
-        max_sequence_length=2048
+        max_sequence_length=512
     ).images[0]
 
     torch.cuda.empty_cache()  # Clean up GPU memory after generation
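For context, a minimal sketch of how this call might sit in the full infer function. The pipeline class, checkpoint name, pipe variable, and the surrounding setup are assumptions (none of them appear in this diff); the signature and keyword arguments follow the hunk above, and the pattern matches diffusers text-to-image spaces built around FLUX, where max_sequence_length caps the number of T5 prompt tokens encoded.

import torch
from diffusers import FluxPipeline  # assumed pipeline class; the repo's actual import is outside this diff

# Assumed setup (not shown in the diff): load the pipeline once at startup.
pipe = FluxPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev",  # assumed checkpoint
    torch_dtype=torch.bfloat16,
).to("cuda")

MAX_SEED = 2**32 - 1  # assumed seed bound, common in these spaces

def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, num_inference_steps=28):
    if randomize_seed:
        seed = torch.randint(0, MAX_SEED, (1,)).item()
    generator = torch.Generator(device="cuda").manual_seed(seed)
    image = pipe(
        prompt=prompt,
        width=width,
        height=height,
        num_inference_steps=num_inference_steps,
        generator=generator,
        guidance_scale=7.5,  # Increased guidance scale
        max_sequence_length=512,  # this commit: lowered from 2048
    ).images[0]
    torch.cuda.empty_cache()  # Clean up GPU memory after generation
    return image, seed

If the space does use diffusers' FluxPipeline, 512 is the upper bound that pipeline accepts for max_sequence_length in recent diffusers releases, so the previous value of 2048 would have been rejected at validation time; lowering it also trims text-encoder memory use.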