Spaces: Running on Zero

Tanut committed
Commit · 9b43269
Parent(s): 2073de9

Fix Blend QR image
app.py
CHANGED
@@ -45,47 +45,63 @@ def make_qr(url: str = "http://www.mybirdfire.com", size: int = 512, border: int
 
 # ========= ControlNet Stylizer (SD1.5 + sd15-canny) =========
 _cn = {"pipe": None}
-def
+def _load_controlnet_dual():
     if _cn["pipe"] is None:
         from diffusers import StableDiffusionControlNetPipeline, ControlNetModel
-        from controlnet_aux import CannyDetector
         from diffusers.schedulers.scheduling_euler_discrete import EulerDiscreteScheduler
+        from controlnet_aux import CannyDetector
+
+        canny = ControlNetModel.from_pretrained("lllyasviel/sd-controlnet-canny", torch_dtype=dtype)
+        tile = ControlNetModel.from_pretrained("lllyasviel/control_v11f1e_sd15_tile", torch_dtype=dtype)
 
-        controlnet = ControlNetModel.from_pretrained("lllyasviel/sd-controlnet-canny", torch_dtype=dtype)
         pipe = StableDiffusionControlNetPipeline.from_pretrained(
             "runwayml/stable-diffusion-v1-5",
-            controlnet=
+            controlnet=[canny, tile],  # <— dual CN
             torch_dtype=dtype,
             safety_checker=None
         ).to(device)
-
+
         pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config)
         pipe.enable_attention_slicing()
         pipe.enable_vae_slicing()
-        _cn["pipe"] = pipe
-        _cn["canny"] = CannyDetector()
-    return _cn["pipe"], _cn["canny"]
 
-
-
-    pipe,
+        _cn["pipe"] = pipe
+        _cn["cannyx"] = CannyDetector()
+    return _cn["pipe"], _cn["cannyx"]
+
+NEG_DEFAULT = "lowres, low contrast, blurry, washed out, jpeg artifacts, worst quality"
+
+
+def stylize_qr(prompt: str, negative: str, steps: int, guidance: float, seed: int,
+               canny_low: int, canny_high: int):
+    qr_image = make_qr("http://www.mybirdfire.com", size=512, border=6)
+    pipe, canny = _load_controlnet_dual()
+
+    # edge map for canny CN; tile CN uses the raw QR
     edges = canny(qr_image, low_threshold=int(canny_low), high_threshold=int(canny_high))
 
     gen = torch.Generator(device=device).manual_seed(int(seed)) if int(seed) != 0 else None
+
+    # weights per controlnet: [canny_weight, tile_weight]
+    cn_scales = [1.20, 0.60]  # stronger structure, lighter texture
+
     def run():
         return pipe(
             prompt=str(prompt),
-            negative_prompt=negative or
-            image=edges,
+            negative_prompt=negative or NEG_DEFAULT,
+            image=[edges, qr_image],
+            controlnet_conditioning_scale=cn_scales,
             num_inference_steps=int(steps),
             guidance_scale=float(guidance),
             generator=gen
         ).images[0]
+
     if device in ("cuda", "mps"):
         with torch.autocast(device):
             return run()
     return run()
 
+
 # ========= UI =========
 with gr.Blocks() as demo:
     gr.Markdown("## Stable Diffusion + QR Code + ControlNet")
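
For context: the changed hunk relies on make_qr(), device, and dtype, which are defined earlier in app.py and fall outside this diff. Below is a minimal sketch of what that surrounding setup plausibly looks like, assuming the qrcode and Pillow packages and a standard torch device/dtype pick; it is illustrative only, not the committed code, and the border default is an assumption (the real signature is truncated in the hunk header).

import torch
import qrcode
from PIL import Image

# Pick an accelerator if available, otherwise CPU; fp16 only makes sense on CUDA.
device = "cuda" if torch.cuda.is_available() else ("mps" if torch.backends.mps.is_available() else "cpu")
dtype = torch.float16 if device == "cuda" else torch.float32

def make_qr(url: str = "http://www.mybirdfire.com", size: int = 512, border: int = 6) -> Image.Image:
    # border default assumed here; the diff cuts off the real default value.
    # High error correction leaves headroom for heavy stylization while staying scannable.
    qr = qrcode.QRCode(error_correction=qrcode.constants.ERROR_CORRECT_H, border=border)
    qr.add_data(url)
    qr.make(fit=True)
    img = qr.make_image(fill_color="black", back_color="white").convert("RGB")
    # Nearest-neighbour resize keeps the module edges sharp for the Canny detector.
    return img.resize((size, size), Image.NEAREST)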
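
The trailing context lines show where the Gradio UI begins, but the actual controls sit below the hunk. As a hypothetical sketch of how stylize_qr() could be wired into that gr.Blocks UI (control names, ranges, and defaults here are assumptions, not the committed layout):

import gradio as gr  # already imported in app.py as gr

with gr.Blocks() as demo:
    gr.Markdown("## Stable Diffusion + QR Code + ControlNet")
    prompt = gr.Textbox(label="Prompt")
    negative = gr.Textbox(label="Negative prompt (empty uses NEG_DEFAULT)")
    steps = gr.Slider(10, 50, value=30, step=1, label="Steps")
    guidance = gr.Slider(1.0, 15.0, value=7.5, step=0.5, label="Guidance scale")
    seed = gr.Number(value=0, label="Seed (0 = random)")
    canny_low = gr.Slider(0, 255, value=100, step=1, label="Canny low threshold")
    canny_high = gr.Slider(0, 255, value=200, step=1, label="Canny high threshold")
    out = gr.Image(label="Stylized QR")
    gr.Button("Generate").click(
        stylize_qr,
        inputs=[prompt, negative, steps, guidance, seed, canny_low, canny_high],
        outputs=out,
    )

demo.launch()

Since the Space runs on ZeroGPU, the real app presumably also decorates the generation entry point with @spaces.GPU so the pipeline gets a GPU allocated per call.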