patrickvonplaten committed
Commit 9f8624d · 1 Parent(s): 8517f95

Update app.py

Files changed (1):
  1. app.py +12 -10
app.py CHANGED
@@ -9,24 +9,26 @@ dtype = torch.float16
 device = torch.device("cuda:0")
 
 pipeline = AutoPipelineForText2Image.from_pretrained("stabilityai/stable-diffusion-xl-base-1.0", variant="fp16", torch_dtype=dtype)
-pipeline.vae.register_to_config("force_upcast"=False)
+pipeline.vae.register_to_config(force_upcast=False)
 pipeline.to(device)
 
 
 if USE_TORCH_COMPILE:
     pipeline.unet = torch.compile(pipeline.unet, mode="reduce-overhead", fullgraph=True)
 
-def generate(num_images_per_prompt: int = 1):
+def generate(num_images_per_prompt: int = 1):\
+    print("Start...")
     print("Version", torch.__version__)
-    prompt = 77 * "a"
-    num_inference_steps = 40
-    start_time = time.time()
-    pipeline(prompt, num_images_per_prompt=num_images_per_prompt, num_inference_steps=num_inference_steps).images
-    end_time = time.time()
 
-
-    print(f"For {num_inference_steps} steps", end_time - start_time)
-    print("Avg per step", (end_time - start_time) / num_inference_steps)
+    for _ in range(3):
+        prompt = 77 * "a"
+        num_inference_steps = 40
+        start_time = time.time()
+        pipeline(prompt, num_images_per_prompt=num_images_per_prompt, num_inference_steps=num_inference_steps).images
+        end_time = time.time()
+
+        print(f"For {num_inference_steps} steps", end_time - start_time)
+        print("Avg per step", (end_time - start_time) / num_inference_steps)
 
 
 with gr.Blocks(css="style.css") as demo:
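
Taken together, the commit fixes the `register_to_config` call (the old spelling `"force_upcast"=False` puts a string literal on the left of `=`, which is a SyntaxError; the config key must be passed as a keyword argument) and repeats the timed generation three times, so a slow warm-up run, e.g. the first run after `torch.compile`, stands out from the steady-state runs. Below is a minimal sketch of the resulting benchmark, assuming the imports and the `USE_TORCH_COMPILE` flag defined earlier in app.py; the stray trailing backslash on the committed `def generate(...):\` line is dropped here, since a line continuation there would join the `def` header with the next line and break the indented body.

import time

import torch
from diffusers import AutoPipelineForText2Image

dtype = torch.float16
device = torch.device("cuda:0")
USE_TORCH_COMPILE = False  # assumption: flag is defined earlier in app.py

pipeline = AutoPipelineForText2Image.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0", variant="fp16", torch_dtype=dtype
)
# Keyword-argument form; the old '"force_upcast"=False' spelling is a SyntaxError.
pipeline.vae.register_to_config(force_upcast=False)
pipeline.to(device)

if USE_TORCH_COMPILE:
    pipeline.unet = torch.compile(pipeline.unet, mode="reduce-overhead", fullgraph=True)


def generate(num_images_per_prompt: int = 1):
    print("Start...")
    print("Version", torch.__version__)

    # Time three runs so a slow first (warm-up / compilation) run is visible
    # next to the steady-state runs.
    for _ in range(3):
        prompt = 77 * "a"  # dummy prompt: the character "a" repeated 77 times
        num_inference_steps = 40
        start_time = time.time()
        pipeline(
            prompt,
            num_images_per_prompt=num_images_per_prompt,
            num_inference_steps=num_inference_steps,
        ).images
        end_time = time.time()

        print(f"For {num_inference_steps} steps", end_time - start_time)
        print("Avg per step", (end_time - start_time) / num_inference_steps)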