import os
import gc
import shutil
import psutil
import torch


def is_disk_full(min_free_space_in_GB=10):
    """Return True if free space on the root filesystem is below the given threshold."""
    total, used, free = shutil.disk_usage("/")
    free_gb = free / (1024 ** 3)  # bytes -> GiB
    if free_gb >= min_free_space_in_GB:
        print(f'enough space available ({free_gb:.1f} GB)')
        return False
    else:
        print(f'only {free_gb:.1f} GB free, clean up!')
        return True
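
# Example usage (a sketch; the threshold value and cache path are illustrative
# assumptions, not part of this module):
#
#   if is_disk_full(min_free_space_in_GB=5):
#       shutil.rmtree("/tmp/model_cache", ignore_errors=True)  # hypothetical cache dir
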
def release_model(model=None, label='Model'):
    """
    Releases CPU and GPU memory used by a model or pipeline.

    Args:
        model: The object to delete (e.g., model, pipeline).
        label: String label for log output.
    """
    using_cuda = torch.cuda.is_available()
    was_cuda = False

    # CPU memory before
    process = psutil.Process(os.getpid())
    mem_cpu_before = process.memory_info().rss / 1e6  # MB

    if using_cuda:
        mem_gpu_before = torch.cuda.memory_allocated() / 1e6  # bytes -> MB
        print(f"\n[{label}] GPU memory before release: {mem_gpu_before:.2f} MB")
    print(f"[{label}] CPU memory before release: {mem_cpu_before:.2f} MB")
    # Try to detect if model was on CUDA
    if model is not None:
        try:
            if hasattr(model, 'parameters'):
                was_cuda = any(p.is_cuda for p in model.parameters())
        except Exception as e:
            print(f"[{label}] Could not check device: {e}")

    # Drop this function's reference; the caller must also release theirs
    # for the object to become garbage-collectable.
    del model
    # Garbage collection and cache clearing
    gc.collect()
    if using_cuda:
        if was_cuda:
            torch.cuda.empty_cache()
        else:
            print(f"[{label}] ⚠️ Model was not using CUDA, but CUDA is available.")
    # CPU memory after
    mem_cpu_after = process.memory_info().rss / 1e6  # MB
    print(f"[{label}] CPU memory after release: {mem_cpu_after:.2f} MB")

    if using_cuda:
        mem_gpu_after = torch.cuda.memory_allocated() / 1e6  # bytes -> MB
        print(f"[{label}] GPU memory after release: {mem_gpu_after:.2f} MB\n")
    else:
        print(f"[{label}] CUDA not available — GPU memory not tracked.\n")