Spaces:
Sleeping
Sleeping
Yaron Koresh
committed on
Update app.py
Browse files
app.py
CHANGED
@@ -9,14 +9,14 @@ import gradio as gr
|
|
9 |
import numpy as np
|
10 |
from lxml.html import fromstring
|
11 |
from pathos.threading import ThreadPool as Pool
|
12 |
-
from diffusers import DiffusionPipeline,
|
|
|
13 |
#from diffusers.utils import export_to_gif
|
14 |
#from huggingface_hub import hf_hub_download
|
15 |
#from safetensors.torch import load_file
|
16 |
|
17 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
18 |
-
pipe =
|
19 |
-
pipe_flx = FlaxStableDiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-schnell", token=os.getenv("hf_token")).to(device)
|
20 |
|
21 |
def translate(text,lang):
|
22 |
|
@@ -79,7 +79,7 @@ def generate_random_string(length):
|
|
79 |
@spaces.GPU(duration=35)
|
80 |
def Piper(_do,neg):
|
81 |
try:
|
82 |
-
retu =
|
83 |
_do,
|
84 |
neg_prompt_ids=neg,
|
85 |
height=512,
|
|
|
9 |
import numpy as np
|
10 |
from lxml.html import fromstring
|
11 |
from pathos.threading import ThreadPool as Pool
|
12 |
+
#from diffusers import DiffusionPipeline,
|
13 |
+
from diffusers.pipelines.flux import FluxPipeline
|
14 |
#from diffusers.utils import export_to_gif
|
15 |
#from huggingface_hub import hf_hub_download
|
16 |
#from safetensors.torch import load_file
|
17 |
|
18 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
19 |
+
pipe = FluxPipeline.from_pretrained("black-forest-labs/FLUX.1-schnell", torch_dtype=torch.bfloat16, token=os.getenv("hf_token")).to(device)
|
|
|
20 |
|
21 |
def translate(text,lang):
|
22 |
|
|
|
79 |
@spaces.GPU(duration=35)
|
80 |
def Piper(_do,neg):
|
81 |
try:
|
82 |
+
retu = pipe(
|
83 |
_do,
|
84 |
neg_prompt_ids=neg,
|
85 |
height=512,
|