Update app.py
app.py (CHANGED)
@@ -129,7 +129,7 @@ def create_demo(
 
         noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)
        noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)
-        latents = scheduler.step(
+        latents = scheduler.step(noise_pred, t, latents).prev_sample
 
     with torch.no_grad():
         latents = latents / vae.config.scaling_factor
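The fix in this hunk completes a call that was previously cut off after `scheduler.step(`. For context, here is a minimal sketch of the classifier-free-guidance denoising loop this line belongs to, assuming a diffusers-style setup; the names `unet`, `text_embeddings`, and `latent_model_input` are illustrative and not taken from the diff:

import torch

# Sketch only: unet, scheduler, text_embeddings, guidance_scale and the initial
# latents are assumed to be prepared earlier in create_demo().
for t in scheduler.timesteps:
    # One UNet pass covers both the unconditional and the text-conditioned branch.
    latent_model_input = torch.cat([latents] * 2)
    latent_model_input = scheduler.scale_model_input(latent_model_input, t)

    with torch.no_grad():
        noise_pred = unet(latent_model_input, t, encoder_hidden_states=text_embeddings).sample

    # Classifier-free guidance, exactly as in the hunk above.
    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)
    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)

    # The fixed line: step() takes the noise prediction, the timestep, and the current
    # latents; .prev_sample is the denoised latent carried into the next iteration.
    latents = scheduler.step(noise_pred, t, latents).prev_sample

The original truncated call was not valid Python, so completing it is what lets the sampling loop run at all.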
@@ -207,12 +207,12 @@ if __name__ == "__main__":
     class AppArgs:
         model_name: str = "danhtran2mind/ghibli-fine-tuned-sd-2.1"
         device: str = "cuda" if torch.cuda.is_available() else "cpu"
-
-
+        port: int = 7860
+        share: bool = False  # Set to True for public sharing (Hugging Face Spaces)
 
     parser = HfArgumentParser([AppArgs])
     args_tuple = parser.parse_args_into_dataclasses()
     args = args_tuple[0]
 
     demo = create_demo(args.model_name, args.device)
-    demo.launch()
+    demo.launch(server_port=args.port, share=args.share)
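The second hunk surfaces the server port and the Gradio share option as command-line arguments. Below is a minimal sketch of how these fields travel from the CLI to `demo.launch()`, assuming the `@dataclass` decorator and the `create_demo` function already exist in app.py as the diff implies:

from dataclasses import dataclass

import torch
from transformers import HfArgumentParser

@dataclass
class AppArgs:
    model_name: str = "danhtran2mind/ghibli-fine-tuned-sd-2.1"
    device: str = "cuda" if torch.cuda.is_available() else "cpu"
    port: int = 7860        # Gradio's usual default port
    share: bool = False     # Set to True for public sharing (Hugging Face Spaces)

if __name__ == "__main__":
    # HfArgumentParser turns each dataclass field into a CLI option, so the app can
    # now be started as, for example:  python app.py --port 7861 --share True
    parser = HfArgumentParser([AppArgs])
    args = parser.parse_args_into_dataclasses()[0]

    demo = create_demo(args.model_name, args.device)  # create_demo as defined in app.py
    # server_port binds the chosen local port; share=True additionally requests a
    # temporary public gradio.live URL for the running demo.
    demo.launch(server_port=args.port, share=args.share)

In recent transformers versions, a bool field that defaults to False can typically also be enabled by passing just `--share` with no value, though the exact parsing behavior depends on the installed HfArgumentParser.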