Commit c51632b
Parent: 40d1ba9
Commit message: "up"

Files changed:
- control_net_canny.py (+2 -1)
- control_net_shuffle.py (+4 -1)
- mass_open_controlnet_pr.sh (+1 -1)
- run_local.py (+2 -2)
control_net_canny.py CHANGED

@@ -10,6 +10,7 @@ import numpy as np
 
 from diffusers import (
     ControlNetModel,
+    EulerDiscreteScheduler,
     StableDiffusionControlNetPipeline,
     UniPCMultistepScheduler,
 )
@@ -35,7 +36,7 @@ pipe = StableDiffusionControlNetPipeline.from_pretrained(
     "runwayml/stable-diffusion-v1-5", controlnet=controlnet, torch_dtype=torch.float16
 )
 
-pipe.scheduler =
+pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config)
 pipe.enable_model_cpu_offload()
 
 generator = torch.manual_seed(33)
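For context, a minimal sketch of how the updated control_net_canny.py is typically wired end to end. The checkpoint "lllyasviel/sd-controlnet-canny", the input file "input.png", and the Canny thresholds are assumptions for illustration, not taken from this commit:

# Minimal sketch (assumed setup): canny-conditioned generation with the
# EulerDiscreteScheduler swapped in via from_config, as in this commit.
import cv2
import numpy as np
import torch
from PIL import Image
from diffusers import (
    ControlNetModel,
    EulerDiscreteScheduler,
    StableDiffusionControlNetPipeline,
)

# Build a canny edge map as the conditioning image (thresholds are assumptions).
image = np.array(Image.open("input.png"))  # assumed input file
edges = cv2.Canny(image, 100, 200)
canny_image = Image.fromarray(np.stack([edges] * 3, axis=-1))

controlnet = ControlNetModel.from_pretrained(
    "lllyasviel/sd-controlnet-canny", torch_dtype=torch.float16  # assumed checkpoint
)
pipe = StableDiffusionControlNetPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5", controlnet=controlnet, torch_dtype=torch.float16
)
# Replace the default scheduler while keeping its config, as the diff does.
pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config)
pipe.enable_model_cpu_offload()

generator = torch.manual_seed(33)
result = pipe("a photo of a house", image=canny_image, generator=generator).images[0]
result.save("out.png")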
control_net_shuffle.py CHANGED

@@ -8,6 +8,8 @@ from controlnet_aux import ContentShuffleDetector
 
 from diffusers import (
     ControlNetModel,
+    EulerDiscreteScheduler,
+    EulerDiscreteScheduler,
     StableDiffusionControlNetPipeline,
     UniPCMultistepScheduler,
 )
@@ -29,7 +31,8 @@ pipe = StableDiffusionControlNetPipeline.from_pretrained(
     "runwayml/stable-diffusion-v1-5", controlnet=controlnet, torch_dtype=torch.float16
 )
 
-pipe.scheduler = UniPCMultistepScheduler.from_config(pipe.scheduler.config)
+# pipe.scheduler = UniPCMultistepScheduler.from_config(pipe.scheduler.config)
+pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config)
 pipe.enable_model_cpu_offload()
 
 generator = torch.manual_seed(33)
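The shuffle variant differs from the canny script mainly in its conditioning image, which comes from controlnet_aux's ContentShuffleDetector (already imported at the top of the file). A minimal sketch of that preprocessing step, assuming an input image named "input.png" (the filename is an assumption):

# Minimal sketch (assumed setup): producing the shuffle conditioning image
# with controlnet_aux, as imported in control_net_shuffle.py.
from PIL import Image
from controlnet_aux import ContentShuffleDetector

processor = ContentShuffleDetector()
control_image = processor(Image.open("input.png"))  # assumed input file
control_image.save("shuffle_condition.png")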
mass_open_controlnet_pr.sh CHANGED

@@ -3,4 +3,4 @@ while read p; do
   echo "-------------------------------"
   echo "Open PR for $p"
   python convert_flax_to_pt.py $p
-done
+done <model_ids.txt
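The redirect added on the last line feeds model_ids.txt into the `while read p` loop, so each line of the file becomes one value of `$p`. A rough Python equivalent of the same loop, assuming model_ids.txt holds one repo id per line (this helper is an illustration, not part of the commit):

# Rough Python equivalent of mass_open_controlnet_pr.sh (illustration only).
import subprocess

with open("model_ids.txt") as f:  # one repo id per line (assumed format)
    for repo_id in (line.strip() for line in f):
        if not repo_id:
            continue
        print("-------------------------------")
        print(f"Open PR for {repo_id}")
        subprocess.run(["python", "convert_flax_to_pt.py", repo_id], check=False)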
run_local.py CHANGED

@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-from diffusers import
+from diffusers import StableDiffusionControlNetPipeline
 import time
 import os
 from huggingface_hub import HfApi
@@ -17,7 +17,7 @@ api = HfApi()
 start_time = time.time()
 #pipe = StableDiffusionPipeline.from_pretrained(path, torch_dtype=torch.float16, device_map="auto")
 #pipe.scheduler = HeunDiscreteScheduler.from_config(pipe.scheduler.config)
-pipe =
+pipe = StableDiffusionControlNetPipeline.from_pretrained(path, torch_dtype=torch.float16, safety_checker=None)
 pipe.scheduler = KDPM2AncestralDiscreteScheduler.from_config(pipe.scheduler.config)
 
 # compel = Compel(tokenizer=pipe.tokenizer, text_encoder=pipe.text_encoder)
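The restored lines load a pipeline from a local `path` and then swap in KDPM2AncestralDiscreteScheduler with the same from_config pattern used in the other scripts. A minimal sketch, assuming `path` points at a locally saved ControlNet pipeline directory (so no explicit controlnet argument is needed) and timing the load the way run_local.py does; the directory name is an assumption:

# Minimal sketch (assumed setup): timed pipeline load plus scheduler swap,
# mirroring the restored lines in run_local.py.
import time
import torch
from diffusers import KDPM2AncestralDiscreteScheduler, StableDiffusionControlNetPipeline

path = "./my_controlnet_pipeline"  # assumed local pipeline directory, not from this commit

start_time = time.time()
pipe = StableDiffusionControlNetPipeline.from_pretrained(
    path, torch_dtype=torch.float16, safety_checker=None
)
pipe.scheduler = KDPM2AncestralDiscreteScheduler.from_config(pipe.scheduler.config)
print(f"Loaded in {time.time() - start_time:.2f}s")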