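# Gradio Space: text-to-image demo running the segmind/SSD-1B (distilled SDXL) pipeline on CPU.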
import torch
import gradio as gr
from accelerate import Accelerator
from diffusers import StableDiffusionXLPipeline
# Force CPU execution; load the fp16 weight variant and cast it to bfloat16 for inference.
accelerator = Accelerator(cpu=True)
pipe = accelerator.prepare(StableDiffusionXLPipeline.from_pretrained("segmind/SSD-1B", torch_dtype=torch.bfloat16, use_safetensors=True, variant="fp16"))
pipe.to("cpu")
def plex(prompt, neg_prompt, stips):
    # Generate two images for the prompt; the slider value may arrive as a float, so cast it to int.
    output = pipe(prompt=[prompt] * 2, negative_prompt=[neg_prompt] * 2,
                  num_inference_steps=int(stips), output_type="pil")
    return list(output.images)
iface = gr.Interface(
    fn=plex,
    inputs=[
        gr.Textbox(label="prompt"),
        gr.Textbox(label="negative prompt", value="ugly, blurry, poor quality"),
        gr.Slider(label="num inference steps", minimum=1, step=1, maximum=5, value=4),
    ],
    outputs=gr.Gallery(label="out", columns=2),
)
iface.queue(max_size=1, api_open=False)
iface.launch(max_threads=1)