# New module to handle device setup
import torch
from app.core.logging_setup import logger

def check_gpu_availability() -> str:
    """Detect whether PyTorch can see a CUDA-capable GPU and return the device string to use."""
    logger.info("Checking GPU availability...")
    if torch.cuda.is_available():
        gpu_count = torch.cuda.device_count()
        logger.info(f"✅ PyTorch CUDA is available! Found {gpu_count} GPU(s).")
        for i in range(gpu_count):
            gpu_name = torch.cuda.get_device_name(i)
            logger.info(f"   GPU {i}: {gpu_name}")
        return "cuda"
    else:
        logger.warning("⚠️ PyTorch CUDA is not available. Falling back to CPU.")
        return "cpu"

device = check_gpu_availability()  # resolved once at import time
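
# Example usage (a minimal sketch, not part of the app's real code path;
# torch.nn.Linear stands in for whatever model the project actually uses):
if __name__ == "__main__":
    example_model = torch.nn.Linear(4, 2).to(device)   # move the model to the detected device
    example_input = torch.randn(1, 4, device=device)   # allocate inputs on the same device
    logger.info(f"Example forward pass output: {example_model(example_input)}")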