Yaron Koresh committed
Commit ad61842 · verified · 1 Parent(s): d20558f

Update app.py

Files changed (1)
  1. app.py +2 -8
app.py CHANGED
@@ -18,13 +18,12 @@ from torch.multiprocessing import Pool
 #from huggingface_hub import hf_hub_download
 #from safetensors.torch import load_file
 from diffusers import DiffusionPipeline
-from diffusers.utils import load_image
+#from diffusers.utils import load_image
 #import jax
 #import jax.numpy as jnp
 
 def pipe_t2i():
 PIPE = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-schnell", torch_dtype=torch.bfloat16, token=os.getenv("hf_token")).to("cuda")
-PIPE.unet = torch.compile(PIPE.unet, mode="reduce-overhead", fullgraph=True)
 return PIPE
 
 def translate(text,lang):
@@ -86,11 +85,6 @@ def Piper(name,posi):
 )
 ret1.images[0].save(name)
 
-def tok(pipe,txt):
-toks = pipe.tokenizer(txt)['input_ids']
-print(toks)
-return toks
-
 css="""
 input, input::placeholder {
 text-align: center !important;
@@ -180,7 +174,7 @@ def main():
 with gr.Row():
 prompt = gr.Textbox(
 elem_id="prompt",
-placeholder="PROMPT",
+placeholder="DESCRIPTION",
 container=False,
 max_lines=1
 )
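For context, here is a minimal, hypothetical sketch of how the pipe_t2i() constructor left by this commit could be used on its own to generate and save an image. Only the from_pretrained() call mirrors the diff; the prompt text, output filename, and sampling parameters are illustrative assumptions (the 4-step, zero-guidance values come from the FLUX.1-schnell model card), not values taken from app.py.

# Hypothetical standalone usage sketch -- not part of app.py.
# Assumptions: a CUDA GPU is available and the hf_token environment
# variable holds a Hugging Face access token.
import os

import torch
from diffusers import DiffusionPipeline

def pipe_t2i():
    # Same construction as in the diff: FLUX.1-schnell in bfloat16 on the GPU.
    PIPE = DiffusionPipeline.from_pretrained(
        "black-forest-labs/FLUX.1-schnell",
        torch_dtype=torch.bfloat16,
        token=os.getenv("hf_token"),
    ).to("cuda")
    return PIPE

if __name__ == "__main__":
    pipe = pipe_t2i()
    # FLUX.1-schnell is a few-step distilled model; these sampling values
    # follow its model card example, not anything in this repo.
    result = pipe(
        "a lighthouse on a rocky coast at sunset",  # illustrative prompt
        num_inference_steps=4,
        guidance_scale=0.0,
    )
    result.images[0].save("output.png")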
 