Spaces: Running on Zero
Tanut committed on
Commit · 6a497cb
1 Parent(s): 3ce6abd
Test ZeroGPU
app.py CHANGED
@@ -2,13 +2,29 @@ import gradio as gr
 import spaces
 import torch

+# Outside of GPU context → no GPU available
+zero = torch.tensor([0.0])  # stays on CPU at import time
+print("startup device:", zero.device)  # will log: cpu

-@spaces.GPU
-def greet(n):
+@spaces.GPU  # GPU is allocated ONLY while this function runs
+def greet(n: float):
+    # Inside GPU context → CUDA is available
+    print("inside greet, torch.cuda.is_available():", torch.cuda.is_available())
+    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

+    # Move the tensor to the active device and add the input
+    t0 = zero.to(device)
+    tn = torch.tensor([float(n)], device=device)
+    out = t0 + tn
+
+    # Log where it ran; on ZeroGPU this should be cuda:0
+    print("tensor device during compute:", out.device)
+
+    # Return a friendly message
+    return f"Hello {out.item():.3f} (device: {out.device})"
+
+demo = gr.Interface(fn=greet, inputs=gr.Number(label="Add to zero"), outputs=gr.Text(label="Result"))
+
+if __name__ == "__main__":
+    # Minimal launch so the Space just builds
+    demo.launch()
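
A minimal follow-up sketch, not part of this change: the spaces package also lets the decorator request a longer allocation window through a duration argument (in seconds), which becomes useful once the decorated function does more than add two tensors. The function name heavy, the duration value of 120, and the matrix-product workload below are illustrative assumptions; on hardware other than ZeroGPU the decorator is expected to have no effect.

import spaces
import torch

@spaces.GPU(duration=120)  # ask for up to ~120 s of GPU time per call (value chosen for illustration)
def heavy(n: float):
    # Placeholder workload: a matrix product large enough to justify the longer window
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    x = torch.full((2048, 2048), float(n), device=device)
    return float((x @ x).mean().item())

The rest of the Space (the gr.Interface wiring and demo.launch()) would stay the same as in app.py above.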