Spaces:
Sleeping
Sleeping
Yaron Koresh
committed on
Update app.py
Browse files
app.py
CHANGED
@@ -9,8 +9,8 @@ import gradio as gr
|
|
9 |
import numpy as np
|
10 |
from lxml.html import fromstring
|
11 |
from pathos.threading import ThreadPool as Pool
|
12 |
-
from diffusers import DiffusionPipeline, AnimateDiffPipeline, MotionAdapter,
|
13 |
-
from diffusers.pipelines.flux import FluxPipeline
|
14 |
from diffusers.utils import export_to_gif
|
15 |
from huggingface_hub import hf_hub_download
|
16 |
from safetensors.torch import load_file
|
@@ -21,27 +21,13 @@ dtype = torch.float16
|
|
21 |
step = 2
|
22 |
repo = "ByteDance/AnimateDiff-Lightning"
|
23 |
ckpt = f"animatediff_lightning_{step}step_diffusers.safetensors"
|
24 |
-
|
25 |
|
26 |
adapter = MotionAdapter().to(device, dtype)
|
27 |
adapter.load_state_dict(load_file(hf_hub_download(repo ,ckpt), device=device))
|
|
|
|
|
28 |
|
29 |
-
pipe = AnimateDiffPipeline.from_pretrained(
|
30 |
-
model_id,
|
31 |
-
motion_adapter=adapter,
|
32 |
-
token=os.getenv("hf_token")
|
33 |
-
)
|
34 |
-
|
35 |
-
scheduler = DDIMScheduler.from_pretrained(
|
36 |
-
model_id,
|
37 |
-
subfolder="scheduler",
|
38 |
-
clip_sample=False,
|
39 |
-
timestep_spacing="linspace",
|
40 |
-
steps_offset=1,
|
41 |
-
token=os.getenv("hf_token")
|
42 |
-
)
|
43 |
-
|
44 |
-
pipe.scheduler = scheduler
|
45 |
pipe.enable_vae_slicing()
|
46 |
pipe.enable_vae_tiling()
|
47 |
|
|
|
9 |
import numpy as np
|
10 |
from lxml.html import fromstring
|
11 |
from pathos.threading import ThreadPool as Pool
|
12 |
+
from diffusers import DiffusionPipeline, AnimateDiffPipeline, MotionAdapter, EulerDiscreteScheduler
|
13 |
+
#from diffusers.pipelines.flux import FluxPipeline
|
14 |
from diffusers.utils import export_to_gif
|
15 |
from huggingface_hub import hf_hub_download
|
16 |
from safetensors.torch import load_file
|
|
|
21 |
step = 2
|
22 |
repo = "ByteDance/AnimateDiff-Lightning"
|
23 |
ckpt = f"animatediff_lightning_{step}step_diffusers.safetensors"
|
24 |
+
base = "emilianJR/epiCRealism"
|
25 |
|
26 |
adapter = MotionAdapter().to(device, dtype)
|
27 |
adapter.load_state_dict(load_file(hf_hub_download(repo ,ckpt), device=device))
|
28 |
+
pipe = AnimateDiffPipeline.from_pretrained(base, motion_adapter=adapter, torch_dtype=dtype).to(device)
|
29 |
+
pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config, timestep_spacing="trailing", beta_schedule="linear")
|
30 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
31 |
pipe.enable_vae_slicing()
|
32 |
pipe.enable_vae_tiling()
|
33 |
|