Spaces: Running on Zero

back to vanilla, comment out zeroGPU flags
hf_gradio_app.py CHANGED (+7 -8)

@@ -1,5 +1,5 @@
 import os, random, time
-import spaces
+#import spaces
 import uuid
 import tempfile, shutil
 from pydub import AudioSegment
@@ -76,11 +76,11 @@ with torch.inference_mode():
     diffusion_net.requires_grad_(False).eval()
     image_proj.requires_grad_(False).eval()
     audio_proj.requires_grad_(False).eval()
-
-
+    reference_net.enable_xformers_memory_efficient_attention()
+    diffusion_net.enable_xformers_memory_efficient_attention()
     noise_scheduler = FlowMatchEulerDiscreteScheduler()
     pipeline = VideoPipeline(vae=vae, reference_net=reference_net, diffusion_net=diffusion_net, scheduler=noise_scheduler, image_proj=image_proj)
-
+    pipeline.to(device=device, dtype=weight_dtype)
 
 def process_audio(file_path, temp_dir):
     # Load the audio file
@@ -99,11 +99,10 @@ def process_audio(file_path, temp_dir):
     print(f"Processed audio saved at: {output_path}")
     return output_path
 
-#@
-@
+#@spaces.GPU(duration=240)
+@torch.inference_mode()
 def generate(input_video, input_audio, seed, progress=gr.Progress(track_tqdm=True)):
-
-
+
     is_shared_ui = True if "fffiloni/MEMO" in os.environ['SPACE_ID'] else False
     temp_dir = None
     if is_shared_ui:
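
For readers comparing the two setups, here is a minimal, runnable sketch of the shape the file has after this commit, with the ZeroGPU pieces left as comments roughly where the diff places them. Only the decorators, the xformers calls, and pipeline.to(device=..., dtype=...) come from the diff; the _TinyPipeline class, the weight_dtype value, and the generate() body are illustrative assumptions, not the Space's actual code.

import torch

device = "cuda" if torch.cuda.is_available() else "cpu"
# Assumption: the diff only shows the weight_dtype name, not its value.
weight_dtype = torch.float16 if torch.cuda.is_available() else torch.float32

class _TinyPipeline(torch.nn.Module):
    """Hypothetical stand-in for the VideoPipeline built earlier in hf_gradio_app.py."""
    def __init__(self):
        super().__init__()
        self.proj = torch.nn.Linear(4, 4)

    def forward(self, x):
        return self.proj(x)

# Import-time setup restored by this commit: on the real reference_net/diffusion_net,
# enable_xformers_memory_efficient_attention() is called here, then the pipeline is
# moved onto the device once so every request reuses the already-resident weights.
pipeline = _TinyPipeline()
pipeline.to(device=device, dtype=weight_dtype)

# import spaces
# @spaces.GPU(duration=240)   # ZeroGPU decorator, left commented out by this commit
@torch.inference_mode()       # active in the new file: run generate() without autograd
def generate(input_video, input_audio, seed):
    x = torch.randn(1, 4, device=device, dtype=weight_dtype)
    return pipeline(x)        # placeholder for the real video generation

The practical difference is when GPU resources are claimed: the vanilla pattern pays the model placement cost once at startup, while the commented-out @spaces.GPU path would request GPU time per call, with duration=240 capping each call's runtime.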