Spaces:
Running
on
Zero
Running
on
Zero
File size: 5,847 Bytes
1fc8d06 51d9f34 56a99b7 d25b42d 184daa2 2073de9 184daa2 2073de9 56a99b7 184daa2 2073de9 56a99b7 2073de9 56a99b7 2073de9 56a99b7 2073de9 56a99b7 2e2f472 3315876 2e2f472 3315876 2e2f472 3315876 2e2f472 3315876 3a24bb3 2073de9 3a24bb3 3315876 3a24bb3 3315876 9b43269 3315876 9b43269 3315876 c119da0 2e2f472 3315876 9b43269 3a24bb3 3315876 2e2f472 56a99b7 3315876 3a24bb3 56a99b7 d25b42d 56a99b7 2073de9 d25b42d 56a99b7 3315876 2073de9 56a99b7 2073de9 56a99b7 2073de9 51d9f34 56a99b7 2073de9 56a99b7 4a9e124 2e2f472 3315876 56a99b7 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 |
import gradio as gr
import torch
from diffusers import StableDiffusionPipeline
from PIL import Image
import qrcode
from qrcode.constants import ERROR_CORRECT_H
# ========= device/dtype =========
# Device preference: CUDA first, then Apple MPS, else CPU.
device = "cuda" if torch.cuda.is_available() else "mps" if torch.backends.mps.is_available() else "cpu"
# Half precision on accelerators; full precision on CPU.
dtype = torch.float16 if device != "cpu" else torch.float32
# ========= SD 1.5 (prompt-only) =========
# Loaded eagerly at import time — the first run downloads the weights from the Hub.
# NOTE(review): presumably acceptable startup cost for a Space; confirm if cold-start matters.
sd_pipe = StableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5",
    torch_dtype=dtype
).to(device)
def sd_generate(prompt, negative, steps, guidance, seed):
    """Run the SD 1.5 pipeline once and return the first generated image.

    A seed of 0 means "no fixed generator" (non-deterministic output); any
    other value seeds a torch.Generator on the active device so results
    are reproducible.
    """
    seed = int(seed)
    generator = None
    if seed != 0:
        generator = torch.Generator(device=device).manual_seed(seed)

    def _invoke():
        result = sd_pipe(
            prompt,
            negative_prompt=negative or "",
            num_inference_steps=int(steps),
            guidance_scale=float(guidance),
            generator=generator,
        )
        return result.images[0]

    # Autocast only on accelerators; the CPU path runs in plain fp32.
    if device == "cpu":
        return _invoke()
    with torch.autocast(device):
        return _invoke()
# ========= QR Maker =========
def make_qr(url: str = "http://www.mybirdfire.com", size: int = 512, border: int = 4,
            box_size: int = 10) -> Image.Image:
    """Render *url* as a black-on-white QR code image.

    Args:
        url: payload text; surrounding whitespace is stripped before encoding.
        size: final image edge length in pixels (image is square).
        border: quiet-zone width in modules (the QR spec minimum is 4).
        box_size: pixels per module before the final resize (was hard-coded
            to 10; kept as the default for backward compatibility).

    Returns:
        An RGB PIL image of exactly ``size`` x ``size`` pixels.
    """
    qr = qrcode.QRCode(
        version=None,                       # auto-pick the smallest version that fits
        error_correction=ERROR_CORRECT_H,   # highest EC (~30% damage tolerance)
        box_size=box_size,
        border=border
    )
    qr.add_data(url.strip())
    qr.make(fit=True)
    img = qr.make_image(fill_color="black", back_color="white").convert("RGB")
    # NEAREST keeps module edges hard — no gray interpolation that could
    # hurt scannability.
    return img.resize((size, size), resample=Image.NEAREST)
# ========= SDXL dual ControlNet stylizer (canny + softedge) =========
from diffusers import StableDiffusionXLControlNetPipeline, ControlNetModel
from diffusers.schedulers.scheduling_euler_discrete import EulerDiscreteScheduler
from controlnet_aux import CannyDetector
# Hub repo ids for the SDXL base model and its two ControlNets.
SDXL_MODEL = "stabilityai/stable-diffusion-xl-base-1.0"  # swap to your SDXL anime model if desired
CN_CANNY = "diffusers/controlnet-canny-sdxl-1.0"
CN_SOFT = "diffusers/controlnet-softedge-sdxl-1.0"  # <-- replaces non-existent tile SDXL
# Lazy-load cache, populated on first use by _load_sdxl_dual()
# (keys: "pipe", "canny").
_sdxl = {"pipe": None}
def _load_sdxl_dual():
    """Lazily build and cache the SDXL pipeline with dual ControlNets.

    Downloads happen on the first call only; afterwards the cached objects
    in the module-level ``_sdxl`` dict are returned.

    Returns:
        tuple: ``(pipe, canny)`` — the StableDiffusionXLControlNetPipeline
        (canny + softedge, in that order) and a CannyDetector for building
        the edge-map conditioning image.
    """
    if _sdxl["pipe"] is None:
        cn_canny = ControlNetModel.from_pretrained(CN_CANNY, torch_dtype=dtype)
        cn_soft = ControlNetModel.from_pretrained(CN_SOFT, torch_dtype=dtype)
        # SDXL pipelines have no safety_checker component — the previous
        # safety_checker=None kwarg was unexpected and ignored by diffusers,
        # so it is dropped here.
        pipe = StableDiffusionXLControlNetPipeline.from_pretrained(
            SDXL_MODEL, controlnet=[cn_canny, cn_soft], torch_dtype=dtype
        ).to(device)
        pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config)
        pipe.enable_vae_slicing()  # lowers peak memory during VAE decode
        _sdxl["pipe"] = pipe
        _sdxl["canny"] = CannyDetector()
    return _sdxl["pipe"], _sdxl["canny"]
# Shared negative prompt for the SDXL stylizer.
NEG = "lowres, low contrast, blurry, jpeg artifacts, worst quality, extra digits, bad anatomy"

def stylize_qr_sdxl(prompt: str, steps: int = 28, guidance: float = 7.0, seed: int = 1470713301,
                    canny_low: int = 80, canny_high: int = 160,
                    url: str = "http://www.mybirdfire.com"):
    """Stylize a QR code with SDXL guided by canny + softedge ControlNets.

    Args:
        prompt: style/content description for the diffusion model.
        steps: number of inference steps.
        guidance: classifier-free guidance (CFG) scale.
        seed: 0 means non-deterministic; any other value fixes the generator.
        canny_low: lower Canny hysteresis threshold (0-255).
        canny_high: upper Canny hysteresis threshold (0-255).
        url: payload encoded in the QR code. Added as a trailing keyword
            parameter (previously hard-coded) so existing positional callers
            keep working.

    Returns:
        PIL.Image.Image: the stylized 1024x1024 image.
    """
    # 1) a strong QR at SDXL's native 1024px with a wide quiet zone
    qr = make_qr(url, size=1024, border=6)
    # 2) edge map feeds the canny ControlNet; the raw QR feeds softedge
    pipe, canny = _load_sdxl_dual()
    edges = canny(qr, low_threshold=int(canny_low), high_threshold=int(canny_high))
    gen = torch.Generator(device=device).manual_seed(int(seed)) if int(seed) != 0 else None
    # Per-ControlNet weights and active step windows, ordered (canny, softedge):
    # canny is active from 25% to 95% of the steps at weight 1.1; softedge
    # runs the whole schedule at the lower weight 0.6.
    cn_scales = [1.1, 0.6]
    cn_start = [0.25, 0.00]
    cn_end = [0.95, 1.00]

    def run():
        return pipe(
            prompt=prompt,
            negative_prompt=NEG,
            image=[edges, qr],  # canny first, softedge second
            controlnet_conditioning_scale=cn_scales,
            control_guidance_start=cn_start,
            control_guidance_end=cn_end,
            num_inference_steps=int(steps),
            guidance_scale=float(guidance),
            generator=gen
        ).images[0]

    if device in ("cuda", "mps"):
        with torch.autocast(device):
            return run()
    return run()
# ========= UI =========
# Three-tab Gradio app: plain SD 1.5 text-to-image, a QR generator, and the
# SDXL ControlNet QR stylizer. Component creation order defines the layout.
with gr.Blocks() as demo:
    gr.Markdown("## Stable Diffusion + QR Code + ControlNet")
    with gr.Tab("Stable Diffusion (prompt → image)"):
        prompt = gr.Textbox(label="Prompt", value="Sky, Moon, Bird, Blue, In the dark, Goddess, Sweet, Beautiful, Fantasy, Art, Anime")
        negative = gr.Textbox(label="Negative Prompt", value="lowres, bad anatomy, worst quality")
        steps = gr.Slider(10, 50, value=30, label="Steps", step=1)
        cfg = gr.Slider(1, 12, value=7.0, label="Guidance Scale", step=0.1)
        seed = gr.Number(value=0, label="Seed (0 = random)", precision=0)
        out_sd = gr.Image(label="Generated Image")
        # Wired here, before `cfg` is rebound in the third tab, so this click
        # captures the slider above.
        gr.Button("Generate").click(sd_generate, [prompt, negative, steps, cfg, seed], out_sd)
    with gr.Tab("QR Maker (mybirdfire)"):
        url = gr.Textbox(label="URL/Text", value="http://www.mybirdfire.com")
        size = gr.Slider(256, 1024, value=512, step=64, label="Size (px)")
        quiet = gr.Slider(0, 8, value=4, step=1, label="Border (quiet zone)")
        out_qr = gr.Image(label="QR Code", type="pil")
        gr.Button("Generate QR").click(make_qr, [url, size, quiet], out_qr)
    with gr.Tab("QR Stylizer (SDXL canny + softedge)"):
        p = gr.Textbox(label="Prompt", value="Sky, Moon, Bird, Blue, In the dark, Goddess, Sweet, Beautiful, Fantasy, Art, Anime")
        st = gr.Slider(20, 40, 28, step=1, label="Steps")
        # NOTE(review): `cfg` shadows the first tab's slider variable. Harmless
        # today because that tab's .click() was already bound, but a distinct
        # name would be safer.
        cfg = gr.Slider(4.5, 9.0, 7.0, step=0.1, label="CFG")
        sd = gr.Number(value=1470713301, label="Seed", precision=0)
        cl = gr.Slider(0, 255, 80, step=1, label="Canny low")
        ch = gr.Slider(0, 255, 160, step=1, label="Canny high")
        out = gr.Image(label="Stylized QR (SDXL)")
        gr.Button("Stylize").click(stylize_qr_sdxl, [p, st, cfg, sd, cl, ch], out)
if __name__ == "__main__":
    demo.launch()
|