Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -12,6 +12,16 @@ import time
 import zipfile
 from transformers import CLIPTextModel, CLIPTokenizer, T5EncoderModel, T5TokenizerFast
 
+# ---- CUDA Check ----
+print("CUDA_VISIBLE_DEVICES=", os.environ.get("CUDA_VISIBLE_DEVICES"))
+print("torch.__version__ =", torch.__version__)
+print("torch.version.cuda =", torch.version.cuda)
+print("cuda available:", torch.cuda.is_available())
+print("cuda device count:", torch.cuda.device_count())
+if torch.cuda.is_available():
+    print("current device:", torch.cuda.current_device())
+    print("device name:", torch.cuda.get_device_name(torch.cuda.current_device()))
+
 # Description for the app
 DESCRIPTION = """## flux realism hpc/."""
 
@@ -42,16 +52,6 @@ pipe_dev.to("cuda")
 dtype = torch.bfloat16
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 
-# ---- CUDA Check ----
-print("CUDA_VISIBLE_DEVICES=", os.environ.get("CUDA_VISIBLE_DEVICES"))
-print("torch.__version__ =", torch.__version__)
-print("torch.version.cuda =", torch.version.cuda)
-print("cuda available:", torch.cuda.is_available())
-print("cuda device count:", torch.cuda.device_count())
-if torch.cuda.is_available():
-    print("current device:", torch.cuda.current_device())
-    print("device name:", torch.cuda.get_device_name(torch.cuda.current_device()))
-
 # --- Model Loading ---
 taef1 = AutoencoderTiny.from_pretrained("madebyollin/taef1", torch_dtype=dtype).to(device)
 good_vae = AutoencoderKL.from_pretrained("black-forest-labs/FLUX.1-Krea-dev", subfolder="vae", torch_dtype=dtype).to(device)
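The moved block is plain module-level logging. As a sketch only (not part of this commit), the same diagnostics could be wrapped in a small helper so they can be re-run later as well as at import time; the name log_cuda_info is hypothetical:

import os
import torch

def log_cuda_info() -> None:
    # Same diagnostics the commit moves to the top of app.py, wrapped for reuse.
    print("CUDA_VISIBLE_DEVICES=", os.environ.get("CUDA_VISIBLE_DEVICES"))
    print("torch.__version__ =", torch.__version__)
    print("torch.version.cuda =", torch.version.cuda)
    print("cuda available:", torch.cuda.is_available())
    print("cuda device count:", torch.cuda.device_count())
    if torch.cuda.is_available():
        print("current device:", torch.cuda.current_device())
        print("device name:", torch.cuda.get_device_name(torch.cuda.current_device()))

log_cuda_info()  # at import time; can be called again inside GPU-bound code

On a ZeroGPU Space (as the "Running on Zero" badge suggests), the GPU is generally attached only while a @spaces.GPU-decorated function is running, so values printed at import time may differ from what a request actually sees.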