lionelgarnier committed on
Commit
65df541
·
1 Parent(s): 57f3ad5

Temporarily disable tokenizer in image generation pipeline

Browse files
Files changed (1) hide show
  1. app.py +5 -3
app.py CHANGED
@@ -25,14 +25,16 @@ def get_image_gen_pipeline():
25
  device = "cuda" if torch.cuda.is_available() else "cpu"
26
  dtype = torch.bfloat16
27
  # Load fast tokenizer for the image pipeline
28
- tokenizer = AutoTokenizer.from_pretrained(
29
- "black-forest-labs/FLUX.1-schnell",
 
30
  use_fast=True
31
  )
32
  _image_gen_pipeline = DiffusionPipeline.from_pretrained(
33
  "black-forest-labs/FLUX.1-schnell",
 
34
  torch_dtype=dtype,
35
- tokenizer=tokenizer
36
  ).to(device)
37
  except Exception as e:
38
  print(f"Error loading image generation model: {e}")
 
25
  device = "cuda" if torch.cuda.is_available() else "cpu"
26
  dtype = torch.bfloat16
27
  # Load fast tokenizer for the image pipeline
28
+ # tokenizer = AutoTokenizer.from_pretrained(
29
+ # "black-forest-labs/FLUX.1-schnell",
30
+ # # "black-forest-labs/FLUX.1-dev",
31
  use_fast=True
32
  )
33
  _image_gen_pipeline = DiffusionPipeline.from_pretrained(
34
  "black-forest-labs/FLUX.1-schnell",
35
+ # "black-forest-labs/FLUX.1-dev",
36
  torch_dtype=dtype,
37
+ # tokenizer=tokenizer
38
  ).to(device)
39
  except Exception as e:
40
  print(f"Error loading image generation model: {e}")