Spaces: Running on T4
Commit · 669037e · Parent(s): 75cdf8c
debug zerogpu
model.py  CHANGED
@@ -109,7 +109,7 @@ class Model:
         return additional_prompt if not prompt else f"{prompt}, {additional_prompt}"
 
     @spaces.GPU #[uncomment to use ZeroGPU]
-    @torch.autocast("cuda")
+    # @torch.autocast("cuda")
     def run_pipe(
         self,
         prompt: str,
@@ -121,6 +121,7 @@ class Model:
         seed: int,
     ) -> list[PIL.Image.Image]:
         generator = torch.Generator().manual_seed(seed)
+        self.pipe.to(self.device)
         return self.pipe(
             prompt=prompt,
             negative_prompt=negative_prompt,
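For context, this change follows the usual ZeroGPU pattern: @spaces.GPU attaches a GPU only for the duration of the decorated call, so the pipeline is moved onto the device inside run_pipe rather than at load time, and the @torch.autocast("cuda") decorator is commented out while debugging. The sketch below is a minimal, hypothetical reconstruction of that pattern, not the Space's actual model.py: the __init__ body, the model_id, the trimmed parameter list, the generator= keyword, and the trailing .images are assumptions added only to make the example self-contained and match the declared return type.

# Minimal, hypothetical sketch of the ZeroGPU pattern this commit moves toward.
import PIL.Image
import spaces
import torch
from diffusers import DiffusionPipeline  # assumed pipeline class


class Model:
    def __init__(self, model_id: str = "stabilityai/stable-diffusion-2-1"):  # model_id is an assumption
        # Assumed setup: the pipeline is loaded once at startup; under ZeroGPU a GPU
        # is only attached inside functions decorated with @spaces.GPU.
        self.device = torch.device("cuda")
        self.pipe = DiffusionPipeline.from_pretrained(model_id)

    @spaces.GPU #[uncomment to use ZeroGPU]
    # @torch.autocast("cuda")  # commented out in this commit while debugging ZeroGPU
    def run_pipe(
        self,
        prompt: str,
        negative_prompt: str,
        seed: int,
    ) -> list[PIL.Image.Image]:
        generator = torch.Generator().manual_seed(seed)
        # Move the pipeline onto the GPU attached for this ZeroGPU call.
        self.pipe.to(self.device)
        return self.pipe(
            prompt=prompt,
            negative_prompt=negative_prompt,
            generator=generator,  # assumed; the original call's remaining arguments are elided in the diff
        ).images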