Update app.py
app.py
CHANGED
@@ -41,15 +41,19 @@ def caption2hash(caption: str) -> str:
 
 # ---- Inference function ----
 def infer(caption: str, condition_image: Image.Image, steps: int = 20, seed: int = 0, invert: bool = False):
+    print("Loading condition image")
     img = condition_image.convert("RGB")
     if invert:
         img = ImageOps.invert(img)
+        print("Condition image inverted")
     cache_file = Path(f"inferences/{pil2hash(img)}_{caption2hash(caption)}.png")
     if cache_file.exists():
         return Image.open(cache_file)
 
     generator = torch.manual_seed(seed)
+    print("Starting generation...")
     output = pipe(prompt=caption, image=img, num_inference_steps=steps, generator=generator).images[0]
+    print("Caching result...")
     output.save(cache_file)
     return output
 
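
The added print calls presumably surface each inference stage in the Space logs; the caching logic itself is unchanged. The cache filename still depends on the pil2hash and caption2hash helpers defined earlier in app.py (only the signature of caption2hash is visible in this hunk header). A minimal sketch of what such helpers could look like, assuming hashlib-based digests of the pixel bytes and the prompt text; the actual implementations are not shown in this diff:

import hashlib
from PIL import Image

def pil2hash(img: Image.Image) -> str:
    # Assumed implementation: hash the raw pixel bytes so identical
    # condition images map to the same cache file.
    return hashlib.sha256(img.tobytes()).hexdigest()

def caption2hash(caption: str) -> str:
    # Assumed implementation: hash the prompt text so each
    # (image, caption) pair gets its own cached output.
    return hashlib.sha256(caption.encode("utf-8")).hexdigest()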