lionelgarnier committed on
Commit
5ffb407
·
1 Parent(s): 86467c9

change model

Browse files
Files changed (1) hide show
  1. app.py +6 -17
app.py CHANGED
@@ -25,17 +25,10 @@ def get_image_gen_pipeline():
25
  try:
26
  device = "cuda" if torch.cuda.is_available() else "cpu"
27
  dtype = torch.bfloat16
28
- # Load fast tokenizer for the image pipeline
29
- # tokenizer = AutoTokenizer.from_pretrained(
30
- # "black-forest-labs/FLUX.1-schnell",
31
- # # "black-forest-labs/FLUX.1-dev",
32
- # use_fast=True
33
- # )
34
  _image_gen_pipeline = DiffusionPipeline.from_pretrained(
35
- "black-forest-labs/FLUX.1-schnell",
36
- # "black-forest-labs/FLUX.1-dev",
37
  torch_dtype=dtype,
38
- # tokenizer=tokenizer
39
  ).to(device)
40
  except Exception as e:
41
  print(f"Error loading image generation model: {e}")
@@ -143,9 +136,9 @@ def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, num_in
143
  return None, f"Error generating image: {str(e)}"
144
 
145
  examples = [
146
- "a tiny astronaut hatching from an egg on the moon",
147
- "a cat holding a sign that says hello world",
148
- "an anime illustration of a wiener schnitzel",
149
  ]
150
 
151
  css="""
@@ -308,11 +301,7 @@ def create_interface():
308
  )
309
 
310
  gr.Examples(
311
- examples=[
312
- "a backpack for kids, flower style",
313
- "medieval flip flops",
314
- "cat shaped cake mold"
315
- ],
316
  fn=refine_prompt,
317
  inputs = [prompt],
318
  outputs = [refined_prompt],
 
25
  try:
26
  device = "cuda" if torch.cuda.is_available() else "cpu"
27
  dtype = torch.bfloat16
 
 
 
 
 
 
28
  _image_gen_pipeline = DiffusionPipeline.from_pretrained(
29
+ # "black-forest-labs/FLUX.1-schnell",
30
+ "black-forest-labs/FLUX.1-dev",
31
  torch_dtype=dtype,
 
32
  ).to(device)
33
  except Exception as e:
34
  print(f"Error loading image generation model: {e}")
 
136
  return None, f"Error generating image: {str(e)}"
137
 
138
  examples = [
139
+ "a backpack for kids, flower style",
140
+ "medieval flip flops",
141
+ "cat shaped cake mold",
142
  ]
143
 
144
  css="""
 
301
  )
302
 
303
  gr.Examples(
304
+ examples=examples,
 
 
 
 
305
  fn=refine_prompt,
306
  inputs = [prompt],
307
  outputs = [refined_prompt],