Spaces:
Sleeping
Sleeping
Update utils.py
Browse files
utils.py
CHANGED
@@ -20,7 +20,6 @@ from diffusers import StableDiffusionPipeline, DiffusionPipeline
|
|
20 |
height, width = 512, 512
|
21 |
guidance_scale = 8
|
22 |
custom_loss_scale = 200
|
23 |
-
num_inference_steps = 50
|
24 |
batch_size = 1
|
25 |
torch_device = "cuda" if torch.cuda.is_available() else "mps" if torch.backends.mps.is_available() else "cpu"
|
26 |
|
@@ -68,7 +67,7 @@ def latents_to_pil(latents):
|
|
68 |
pil_images = [Image.fromarray(image) for image in images]
|
69 |
return pil_images
|
70 |
|
71 |
-
def generate_latents(prompts, seed_nums, loss_apply=False):
|
72 |
|
73 |
generator = torch.manual_seed(seed_nums)
|
74 |
|
@@ -141,16 +140,16 @@ def generate_latents(prompts, seed_nums, loss_apply=False):
|
|
141 |
def pil_to_np(image):
|
142 |
return np.array(image)
|
143 |
|
144 |
-
def generate_gradio_images(prompt):
|
145 |
# after loss is applied
|
146 |
latents_list = []
|
147 |
loss_flag = False
|
148 |
for seed_no, sd in zip(seed_list, sdconcepts):
|
149 |
prompts = [f'{prompt} {sd}']
|
150 |
-
latents = generate_latents(prompts, seed_no, loss_apply=loss_flag)
|
151 |
latents_list.append(latents)
|
152 |
loss_flag = True
|
153 |
for seed_no, sd in zip(seed_list, sdconcepts):
|
154 |
prompts = [f'{prompt} {sd}']
|
155 |
-
latents = generate_latents(prompts, seed_no, loss_apply=loss_flag)
|
156 |
latents_list.append(latents)
|
|
|
20 |
height, width = 512, 512
|
21 |
guidance_scale = 8
|
22 |
custom_loss_scale = 200
|
|
|
23 |
batch_size = 1
|
24 |
torch_device = "cuda" if torch.cuda.is_available() else "mps" if torch.backends.mps.is_available() else "cpu"
|
25 |
|
|
|
67 |
pil_images = [Image.fromarray(image) for image in images]
|
68 |
return pil_images
|
69 |
|
70 |
+
def generate_latents(prompts, num_inference_steps, seed_nums, loss_apply=False):
|
71 |
|
72 |
generator = torch.manual_seed(seed_nums)
|
73 |
|
|
|
140 |
def pil_to_np(image):
|
141 |
return np.array(image)
|
142 |
|
143 |
+
def generate_gradio_images(prompt, num_inference_steps):
|
144 |
# after loss is applied
|
145 |
latents_list = []
|
146 |
loss_flag = False
|
147 |
for seed_no, sd in zip(seed_list, sdconcepts):
|
148 |
prompts = [f'{prompt} {sd}']
|
149 |
+
latents = generate_latents(prompts, num_inference_steps, seed_no, loss_apply=loss_flag)
|
150 |
latents_list.append(latents)
|
151 |
loss_flag = True
|
152 |
for seed_no, sd in zip(seed_list, sdconcepts):
|
153 |
prompts = [f'{prompt} {sd}']
|
154 |
+
latents = generate_latents(prompts, num_inference_steps, seed_no, loss_apply=loss_flag)
|
155 |
latents_list.append(latents)
|