Spaces:
Sleeping
Sleeping
Update my_model/utilities.py
Browse files — my_model/utilities.py (+17 −0)
my_model/utilities.py
CHANGED
|
@@ -8,6 +8,7 @@ import torch
|
|
| 8 |
import matplotlib.pyplot as plt
|
| 9 |
from IPython import get_ipython
|
| 10 |
import sys
|
|
|
|
| 11 |
|
| 12 |
|
| 13 |
class VQADataProcessor:
|
|
@@ -290,6 +291,22 @@ def get_model_path(model_name):
|
|
| 290 |
return model_path
|
| 291 |
|
| 292 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 293 |
|
| 294 |
if __name__ == "__main__":
|
| 295 |
pass
|
|
|
|
| 8 |
import matplotlib.pyplot as plt
|
| 9 |
from IPython import get_ipython
|
| 10 |
import sys
|
| 11 |
+
import gc
|
| 12 |
|
| 13 |
|
| 14 |
class VQADataProcessor:
|
|
|
|
| 291 |
return model_path
|
| 292 |
|
| 293 |
|
| 294 |
+
def free_gpu_resources():
    """Release cached GPU memory held by PyTorch.

    Runs Python garbage collection first so tensors with no remaining
    references are actually destroyed, then asks the CUDA caching
    allocator to return its unused blocks to the device. When CUDA is
    unavailable this only triggers a gc pass indirectly (the guard skips
    both calls). Errors are reported but never raised, so the function
    is safe to call opportunistically between pipeline stages.

    Returns:
        None
    """
    try:
        if torch.cuda.is_available():
            # Collect unreachable Python objects FIRST: otherwise their
            # CUDA tensors still pin allocator blocks and empty_cache()
            # has nothing to release. (The original called the pair twice
            # in the wrong order as a workaround; one correctly-ordered
            # pass suffices.)
            gc.collect()
            torch.cuda.empty_cache()

        print("GPU memory has been cleared.")
    except Exception as e:
        # Best-effort cleanup: report the problem and keep running.
        print(f"Error occurred while clearing GPU memory: {str(e)}")
|
| 308 |
+
|
| 309 |
+
|
| 310 |
# Script entry point: this module only provides utilities, so running it
# directly does nothing.
if __name__ == "__main__":
    pass
|