import torch
import gradio as gr
from accelerate import Accelerator
from diffusers import StableDiffusionXLPipeline
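# Load the SSD-1B (distilled SDXL) pipeline from its fp16 checkpoint, cast to bfloat16, and prepare it for CPU-only execution.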
accelerator = Accelerator(cpu=True)
pipe = accelerator.prepare(StableDiffusionXLPipeline.from_pretrained("segmind/SSD-1B", torch_dtype=torch.bfloat16, use_safetensors=True, variant="fp16"))
pipe.to("cpu")
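# Generate two images per request and return them as a list of PIL images for the gallery.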
def plex(prompt, neg_prompt, stips):
    apol = []
    image = pipe(prompt=[prompt] * 2, negative_prompt=[neg_prompt] * 2, num_inference_steps=stips, output_type="pil")
    for imge in image.images:
        apol.append(imge)
    return apol
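# Gradio UI: prompt and negative-prompt textboxes plus a 1-5 inference-step slider, with results shown in a two-column gallery.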
iface = gr.Interface(fn=plex, inputs=[gr.Textbox(label="prompt"),gr.Textbox(label="negative prompt",value="ugly, blurry, poor quality"), gr.Slider(label="num inference steps", minimum=1, step=1, maximum=5, value=4)], outputs=gr.Gallery(label="out", columns=2))
iface.queue(max_size=1,api_open=False)
iface.launch(max_threads=1)