Update clip_model.py
clip_model.py  CHANGED  (+8 -2)
@@ -56,8 +56,14 @@ class ClipModel:
         batch_size = 16  # Process 16 vocab at a time
         similarities = []

-        # Release unused memory
-        torch.cuda.empty_cache()
+        # Check memory usage before calling empty_cache
+        reserved_memory = torch.cuda.memory_reserved()
+        allocated_memory = torch.cuda.memory_allocated()
+
+        # Only call empty_cache if reserved memory exceeds a threshold
+        if reserved_memory > 0.8 * torch.cuda.get_device_properties(0).total_memory:
+            # Release unused memory
+            torch.cuda.empty_cache()

         with torch.no_grad():
             for i in range(0, len(self.vocab), batch_size):
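For context, a minimal self-contained sketch of the pattern this commit introduces: gating torch.cuda.empty_cache() behind a check of reserved memory against total device memory. The helper name maybe_empty_cache, its threshold parameter, and the return value are illustrative assumptions, not part of the repository; the actual clip_model.py performs the check inline as shown in the diff above.

import torch

def maybe_empty_cache(threshold: float = 0.8, device: int = 0) -> bool:
    """Release cached CUDA memory only when the allocator's reserved pool
    exceeds `threshold` of the device's total memory.

    Returns True if empty_cache() was actually called.
    (Hypothetical helper, for illustration only.)
    """
    if not torch.cuda.is_available():
        return False

    reserved = torch.cuda.memory_reserved(device)
    total = torch.cuda.get_device_properties(device).total_memory

    if reserved > threshold * total:
        # Hand cached-but-unused blocks back to the driver so they are
        # available for large upcoming allocations or other processes.
        torch.cuda.empty_cache()
        return True
    return False

Calling empty_cache() unconditionally on a hot path is relatively expensive, since subsequent allocations have to go back through the CUDA driver instead of reusing the caching allocator's pool; gating it behind a high-water-mark check, as the commit does with the 0.8 * total_memory threshold, keeps that cost to the cases where the cache is actually close to exhausting the device.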