fix: debug CUDA issue and update dependencies
Browse files- app.py +9 -2
- requirements.txt +0 -4
app.py
CHANGED
|
@@ -249,9 +249,16 @@ with gr.Blocks() as demo:
|
|
| 249 |
# Launch the Gradio app
|
| 250 |
if __name__ == "__main__":
|
| 251 |
pipeline = TrellisImageTo3DPipeline.from_pretrained("JeffreyXiang/TRELLIS-image-large")
|
| 252 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 253 |
try:
|
| 254 |
pipeline.preprocess_image(Image.fromarray(np.zeros((512, 512, 3), dtype=np.uint8))) # Preload rembg
|
| 255 |
except:
|
| 256 |
pass
|
| 257 |
-
|
|
|
|
|
|
|
|
|
|
|
|
# Launch the Gradio app
if __name__ == "__main__":
    # Load the TRELLIS image-to-3D pipeline from the Hugging Face Hub.
    pipeline = TrellisImageTo3DPipeline.from_pretrained("JeffreyXiang/TRELLIS-image-large")

    # Move the pipeline to GPU when one is present; otherwise stay on CPU.
    if torch.cuda.is_available():
        pipeline.cuda()
        print("CUDA is available. Using GPU.")
    else:
        print("CUDA not available. Falling back to CPU.")

    # Best-effort warm-up: run one dummy preprocess so the rembg background
    # remover downloads/initializes before the first real request. Failure
    # here is non-fatal — the app can still serve requests (rembg will just
    # initialize lazily) — so we log and continue instead of crashing.
    try:
        pipeline.preprocess_image(Image.fromarray(np.zeros((512, 512, 3), dtype=np.uint8)))  # Preload rembg
    except Exception as e:  # was a bare `except: pass`, which also swallowed KeyboardInterrupt/SystemExit
        print(f"rembg preload failed (continuing anyway): {e}")

    # Diagnostic summary of the CUDA environment, printed for debugging.
    print(f"CUDA Available: {torch.cuda.is_available()}")
    print(f"CUDA Version: {torch.version.cuda}")
    print(f"Number of GPUs: {torch.cuda.device_count()}")

    # Start the Gradio server with verbose debug output.
    demo.launch(debug=True)
|
requirements.txt
CHANGED
|
@@ -1,6 +1,3 @@
|
|
| 1 |
-
--extra-index-url https://download.pytorch.org/whl/cu121
|
| 2 |
-
--find-links https://nvidia-kaolin.s3.us-east-2.amazonaws.com/torch-2.4.0_cu121.html
|
| 3 |
-
|
| 4 |
torch==2.4.0
|
| 5 |
torchvision==0.19.0
|
| 6 |
pillow==10.4.0
|
|
@@ -20,7 +17,6 @@ igraph==0.11.8
|
|
| 20 |
git+https://github.com/EasternJournalist/utils3d.git@9a4eb15e4021b67b12c460c7057d642626897ec8
|
| 21 |
xformers==0.0.27.post2
|
| 22 |
kaolin==0.17.0
|
| 23 |
-
spconv-cu120==2.3.6
|
| 24 |
transformers==4.46.3
|
| 25 |
gradio_litmodel3d==0.0.1
|
| 26 |
https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.0.post2/flash_attn-2.7.0.post2+cu12torch2.4cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
torch==2.4.0
|
| 2 |
torchvision==0.19.0
|
| 3 |
pillow==10.4.0
|
|
|
|
| 17 |
git+https://github.com/EasternJournalist/utils3d.git@9a4eb15e4021b67b12c460c7057d642626897ec8
|
| 18 |
xformers==0.0.27.post2
|
| 19 |
kaolin==0.17.0
|
|
|
|
| 20 |
transformers==4.46.3
|
| 21 |
gradio_litmodel3d==0.0.1
|
| 22 |
https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.0.post2/flash_attn-2.7.0.post2+cu12torch2.4cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
|