Falln87 committed on
Commit 7f38524 · verified · 1 Parent(s): e75c7fc

Update app.py

Files changed (1)
  1. app.py +4 -4
app.py CHANGED

@@ -8,11 +8,11 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
 
 if torch.cuda.is_available():
     torch.cuda.max_memory_allocated(device=device)
-    pipe = DiffusionPipeline.from_pretrained("stabilityai/sdxl-turbo", torch_dtype=torch.float16, variant="fp16", use_safetensors=True)
+    pipe = DiffusionPipeline.from_pretrained("stabilityai/stable-diffusion-3-medium", torch_dtype=torch.float16, variant="fp16", use_safetensors=True)
     pipe.enable_xformers_memory_efficient_attention()
     pipe = pipe.to(device)
 else:
-    pipe = DiffusionPipeline.from_pretrained("stabilityai/sdxl-turbo", use_safetensors=True)
+    pipe = DiffusionPipeline.from_pretrained("stabilityai/stable-diffusion-3-medium", use_safetensors=True)
     pipe = pipe.to(device)
 
 MAX_SEED = np.iinfo(np.int32).max

@@ -59,8 +59,8 @@ with gr.Blocks(css=css) as demo:
 
     with gr.Column(elem_id="col-container"):
         gr.Markdown(f"""
-        # Text-to-Image Gradio Template
-        Currently running on {power_device}.
+        # FallnAI DiffusionLab
+        running on {power_device}.
         """)
 
     with gr.Row():
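For context, here is a minimal, self-contained sketch of how the pipeline loaded in this commit is typically invoked (for example, inside the Gradio inference callback). Only the model id and the CUDA/CPU fallback mirror the diff above; the prompt, step count, and guidance scale are illustrative assumptions and are not taken from app.py.

```python
# Minimal usage sketch (not part of the commit). Mirrors the model id and
# CUDA/CPU fallback from the diff above; prompt and sampling values below
# are illustrative placeholders, not values from app.py.
import torch
from diffusers import DiffusionPipeline

device = "cuda" if torch.cuda.is_available() else "cpu"

# Load the model referenced in the commit; use fp16 only when a GPU is available.
pipe = DiffusionPipeline.from_pretrained(
    "stabilityai/stable-diffusion-3-medium",
    torch_dtype=torch.float16 if device == "cuda" else torch.float32,
    use_safetensors=True,
)
pipe = pipe.to(device)

# Generate a single image; step count and guidance scale are placeholder values.
image = pipe(
    prompt="a photo of an astronaut riding a horse on the moon",
    num_inference_steps=28,
    guidance_scale=7.0,
).images[0]
image.save("output.png")
```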