Yaron Koresh committed on
Commit
04ab354
·
verified ·
1 Parent(s): 4e72793

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -33,7 +33,7 @@ import gradio as gr
33
  from lxml.html import fromstring
34
  from huggingface_hub import hf_hub_download
35
  from safetensors.torch import load_file, save_file
36
- from diffusers import DiffusionPipeline, AutoencoderTiny, AutoencoderKL
37
  from PIL import Image, ImageDraw, ImageFont
38
  from transformers import pipeline, T5ForConditionalGeneration, T5Tokenizer
39
  from refiners.fluxion.utils import manual_seed
@@ -440,7 +440,7 @@ MAX_SEED = np.iinfo(np.int32).max
440
 
441
  seq=512
442
  image_steps=50
443
- img_accu=9.0
444
 
445
  # ui data
446
 
@@ -500,7 +500,7 @@ function custom(){
500
 
501
  # torch pipes
502
 
503
- taef1 = AutoencoderTiny.from_pretrained("madebyollin/taef1", torch_dtype=dtype).to(device)
504
  good_vae = AutoencoderKL.from_pretrained("ostris/Flex.1-alpha", subfolder="vae", torch_dtype=dtype).to(device)
505
  image_pipe = DiffusionPipeline.from_pretrained("ostris/Flex.1-alpha", torch_dtype=dtype, vae=taef1).to(device)
506
  image_pipe.enable_model_cpu_offload()
 
33
  from lxml.html import fromstring
34
  from huggingface_hub import hf_hub_download
35
  from safetensors.torch import load_file, save_file
36
+ from diffusers import DiffusionPipeline, AutoencoderKL
37
  from PIL import Image, ImageDraw, ImageFont
38
  from transformers import pipeline, T5ForConditionalGeneration, T5Tokenizer
39
  from refiners.fluxion.utils import manual_seed
 
440
 
441
  seq=512
442
  image_steps=50
443
+ img_accu=7.0
444
 
445
  # ui data
446
 
 
500
 
501
  # torch pipes
502
 
503
+ taef1 = AutoencoderKL.from_pretrained("madebyollin/taef1", torch_dtype=dtype).to(device)
504
  good_vae = AutoencoderKL.from_pretrained("ostris/Flex.1-alpha", subfolder="vae", torch_dtype=dtype).to(device)
505
  image_pipe = DiffusionPipeline.from_pretrained("ostris/Flex.1-alpha", torch_dtype=dtype, vae=taef1).to(device)
506
  image_pipe.enable_model_cpu_offload()