Spaces: Running on Zero

update deploy
app_text.py: +8 -7

app_text.py CHANGED
@@ -1,12 +1,13 @@
 # %%
-
-
-
+import os
+USE_HUGGINGFACE_ZEROGPU = os.getenv("USE_HUGGINGFACE_ZEROGPU", "False").lower() in ["true", "1", "yes"]
+#%%
+if USE_HUGGINGFACE_ZEROGPU:  # huggingface ZeroGPU, dynamic GPU allocation
     try:
         import spaces
-    except
-
-
+    except:
+        USE_HUGGINGFACE_ZEROGPU = False
+
 import gradio as gr
 
 import torch
@@ -213,7 +214,7 @@ def _ncut_run(*args, **kwargs):
         torch.cuda.empty_cache()
         return None, "Error: " + str(e)
 
-if
+if USE_HUGGINGFACE_ZEROGPU:
     @spaces.GPU(duration=30)
     def __ncut_run(*args, **kwargs):
         return _ncut_run(*args, **kwargs)
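The first hunk replaces the old hard-coded setup with an environment-variable switch: the string value of USE_HUGGINGFACE_ZEROGPU is parsed into a boolean, so the same app_text.py can run both on a ZeroGPU Space and on a plain local machine. A minimal sketch of that parsing pattern, using a hypothetical helper name env_flag that is not part of the commit:

import os

def env_flag(name, default="False"):
    # Same idea as the USE_HUGGINGFACE_ZEROGPU line in the diff:
    # any of "true", "1", "yes" (case-insensitive) counts as enabled.
    return os.getenv(name, default).lower() in ["true", "1", "yes"]

USE_HUGGINGFACE_ZEROGPU = env_flag("USE_HUGGINGFACE_ZEROGPU")

With the default "False", a local run never enters the if block, so the spaces import (and its try/except fallback) is skipped entirely; on the Space the variable would typically be set to "true" in the Space settings.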
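The second hunk guards the @spaces.GPU(duration=30) decorator behind the same flag, so the decorated __ncut_run wrapper is only defined when ZeroGPU is in use. As a sketch of one way to get a similar effect without branching at definition time, a no-op fallback decorator could be used; maybe_gpu below is a hypothetical helper, not code from the commit:

def maybe_gpu(duration=30):
    # Return the real ZeroGPU decorator when the flag is on,
    # otherwise a decorator that leaves the function unchanged.
    if USE_HUGGINGFACE_ZEROGPU:
        import spaces
        return spaces.GPU(duration=duration)
    return lambda fn: fn

@maybe_gpu(duration=30)
def __ncut_run(*args, **kwargs):
    return _ncut_run(*args, **kwargs)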