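"""Gradio demo: Stable Diffusion 1.5 text-to-image, a plain QR code maker,
and a QR stylizer that runs SD 1.5 with dual ControlNets (canny + tile)."""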
import gradio as gr
import torch
from diffusers import StableDiffusionPipeline
from PIL import Image
import qrcode
from qrcode.constants import ERROR_CORRECT_H
# ========= device/dtype =========
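# Prefer CUDA, then Apple MPS, then CPU; use fp16 on GPU backends and fp32 on CPU.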
device = "cuda" if torch.cuda.is_available() else "mps" if torch.backends.mps.is_available() else "cpu"
dtype = torch.float16 if device != "cpu" else torch.float32
# ========= SD 1.5 (prompt-only) =========
sd_pipe = StableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5",
    torch_dtype=dtype
).to(device)
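# Plain text-to-image. Seed 0 means "no fixed generator" (non-deterministic);
# autocast is used on GPU backends to match the fp16 weights.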
def sd_generate(prompt, negative, steps, guidance, seed):
    gen = torch.Generator(device=device).manual_seed(int(seed)) if int(seed) != 0 else None
    def run():
        return sd_pipe(
            prompt,
            negative_prompt=negative or "",
            num_inference_steps=int(steps),
            guidance_scale=float(guidance),
            generator=gen
        ).images[0]
    if device in ("cuda", "mps"):
        with torch.autocast(device):
            return run()
    return run()
# ========= QR Maker =========
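# Level H error correction tolerates roughly 30% damage to the code area,
# which leaves room for the heavy stylization applied further below.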
def make_qr(url: str = "http://www.mybirdfire.com", size: int = 512, border: int = 4) -> Image.Image:
    size, border = int(size), int(border)  # cast defensively: Gradio sliders may pass floats
    qr = qrcode.QRCode(
        version=None,
        error_correction=ERROR_CORRECT_H,  # highest error-correction level
        box_size=10,
        border=border
    )
    qr.add_data(url.strip())
    qr.make(fit=True)
    img = qr.make_image(fill_color="black", back_color="white").convert("RGB")
    return img.resize((size, size), resample=Image.NEAREST)
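# Example usage outside the UI (illustrative URL):
#   make_qr("https://example.com", size=768, border=4).save("qr.png")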
# ========= SD1.5 ControlNet stylizer (canny + tile) =========
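# Two ControlNets are stacked: canny preserves the QR module edges, while the tile
# model nudges the texture toward the code's light/dark layout (intended to keep
# the stylized result scannable).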
from diffusers import StableDiffusionControlNetPipeline, ControlNetModel
from diffusers.schedulers.scheduling_euler_discrete import EulerDiscreteScheduler
from controlnet_aux import CannyDetector
BASE_15 = "runwayml/stable-diffusion-v1-5"
CN_CANNY_15 = "lllyasviel/sd-controlnet-canny"
CN_TILE_15 = "lllyasviel/control_v11f1e_sd15_tile"
_cn = {"pipe": None}
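# Lazy-load the ControlNet pipeline on first use and cache it in this dict,
# so the heavy download/initialization happens only once per process.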
def _load_sd15_dual():
    if _cn["pipe"] is None:
        canny = ControlNetModel.from_pretrained(CN_CANNY_15, torch_dtype=dtype)
        tile = ControlNetModel.from_pretrained(CN_TILE_15, torch_dtype=dtype)
        pipe = StableDiffusionControlNetPipeline.from_pretrained(
            BASE_15, controlnet=[canny, tile], torch_dtype=dtype, safety_checker=None
        ).to(device)
        pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config)  # Sampler: Euler
        pipe.enable_attention_slicing()
        pipe.enable_vae_slicing()
        _cn["pipe"] = pipe
        _cn["canny_aux"] = CannyDetector()
    return _cn["pipe"], _cn["canny_aux"]
NEG_DEFAULT = "lowres, low contrast, blurry, jpeg artifacts, worst quality, bad anatomy, extra digits"
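# Stylizer flow: build a fresh QR code, extract canny edges from it, then run the
# dual-ControlNet pipeline with the edges and the raw QR image as conditioning.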
def stylize_qr_sd15(prompt: str, negative: str, steps: int, guidance: float, seed: int,
                    canny_low: int, canny_high: int, border: int):
    # Make a fresh QR code each time
    qr_img = make_qr("http://www.mybirdfire.com", size=512, border=int(border))
    pipe, canny = _load_sd15_dual()
    edges = canny(qr_img, low_threshold=int(canny_low), high_threshold=int(canny_high))
    gen = torch.Generator(device=device).manual_seed(int(seed)) if int(seed) != 0 else None
    # Control weights (canny, tile). Tune if the result looks too blocky or too artsy.
    cn_scales = [1.2, 0.6]
    def run():
        return pipe(
            prompt=str(prompt),
            negative_prompt=negative or NEG_DEFAULT,
            image=[edges, qr_img],  # canny first, tile second
            controlnet_conditioning_scale=cn_scales,
            num_inference_steps=int(steps),
            guidance_scale=float(guidance),
            generator=gen
        ).images[0]
    if device in ("cuda", "mps"):
        with torch.autocast(device):
            return run()
    return run()
# ========= UI =========
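# Three tabs: prompt-only generation, plain QR maker, and the ControlNet QR stylizer.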
with gr.Blocks() as demo:
    gr.Markdown("## Stable Diffusion + QR Code + ControlNet (SD1.5)")
    with gr.Tab("Stable Diffusion (prompt → image)"):
        prompt = gr.Textbox(label="Prompt", value="Sky, Moon, Bird, Blue, In the dark, Goddess, Sweet, Beautiful, Fantasy, Art, Anime")
        negative = gr.Textbox(label="Negative Prompt", value="lowres, bad anatomy, worst quality")
        steps = gr.Slider(10, 50, value=30, label="Steps", step=1)
        cfg = gr.Slider(1, 12, value=7.0, label="Guidance Scale", step=0.1)
        seed = gr.Number(value=0, label="Seed (0 = random)", precision=0)
        out_sd = gr.Image(label="Generated Image")
        gr.Button("Generate").click(sd_generate, [prompt, negative, steps, cfg, seed], out_sd)
    with gr.Tab("QR Maker (mybirdfire)"):
        url = gr.Textbox(label="URL/Text", value="http://www.mybirdfire.com")
        size = gr.Slider(256, 1024, value=512, step=64, label="Size (px)")
        quiet = gr.Slider(0, 8, value=4, step=1, label="Border (quiet zone)")
        out_qr = gr.Image(label="QR Code", type="pil")
        gr.Button("Generate QR").click(make_qr, [url, size, quiet], out_qr)
    with gr.Tab("QR Stylizer (SD1.5 canny + tile, Euler)"):
        s_prompt = gr.Textbox(label="Style Prompt", value="Sky, Moon, Bird, Blue, In the dark, Goddess, Sweet, Beautiful, Fantasy, Art, Anime")
        s_negative = gr.Textbox(label="Negative Prompt", value=NEG_DEFAULT)
        s_steps = gr.Slider(10, 50, value=28, label="Steps", step=1)
        s_cfg = gr.Slider(1, 12, value=7.0, label="CFG", step=0.1)
        s_seed = gr.Number(value=1470713301, label="Seed", precision=0)
        canny_l = gr.Slider(0, 255, value=80, step=1, label="Canny low")
        canny_h = gr.Slider(0, 255, value=160, step=1, label="Canny high")
        s_border = gr.Slider(2, 10, value=6, step=1, label="QR border")
        out_styl = gr.Image(label="Stylized QR")
        gr.Button("Stylize").click(
            stylize_qr_sd15,
            [s_prompt, s_negative, s_steps, s_cfg, s_seed, canny_l, canny_h, s_border],
            out_styl
        )
if __name__ == "__main__":
    demo.launch()