Yaron Koresh committed on
Commit
83d3d2d
·
verified ·
1 Parent(s): 98eceb4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -9,14 +9,14 @@ import gradio as gr
9
  import numpy as np
10
  from lxml.html import fromstring
11
  from pathos.threading import ThreadPool as Pool
12
- from diffusers import DiffusionPipeline, FlaxStableDiffusionPipeline
 
13
  #from diffusers.utils import export_to_gif
14
  #from huggingface_hub import hf_hub_download
15
  #from safetensors.torch import load_file
16
 
17
  device = "cuda" if torch.cuda.is_available() else "cpu"
18
- pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-schnell", torch_dtype=torch.bfloat16, token=os.getenv("hf_token")).to(device)
19
- pipe_flx = FlaxStableDiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-schnell", token=os.getenv("hf_token")).to(device)
20
 
21
  def translate(text,lang):
22
 
@@ -79,7 +79,7 @@ def generate_random_string(length):
79
  @spaces.GPU(duration=35)
80
  def Piper(_do,neg):
81
  try:
82
- retu = pipe_flx(
83
  _do,
84
  neg_prompt_ids=neg,
85
  height=512,
 
9
  import numpy as np
10
  from lxml.html import fromstring
11
  from pathos.threading import ThreadPool as Pool
12
+ #from diffusers import DiffusionPipeline,
13
+ from diffusers.pipelines.flux import FluxPipeline
14
  #from diffusers.utils import export_to_gif
15
  #from huggingface_hub import hf_hub_download
16
  #from safetensors.torch import load_file
17
 
18
  device = "cuda" if torch.cuda.is_available() else "cpu"
19
+ pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-schnell", torch_dtype=torch.bfloat16, token=os.getenv("hf_token")).to(device)
 
20
 
21
  def translate(text,lang):
22
 
 
79
  @spaces.GPU(duration=35)
80
  def Piper(_do,neg):
81
  try:
82
+ retu = pipe(
83
  _do,
84
  neg_prompt_ids=neg,
85
  height=512,