File size: 155 Bytes
8677815
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
# USE THIS FILE TO CLEAR GPU CACHE

import gc
import torch


def report_gpu() -> None:
    """Print the processes currently using the GPU, then release cached CUDA memory.

    This frees memory held by PyTorch's caching allocator back to the
    driver; it does NOT free tensors that are still referenced by live
    Python objects.
    """
    # Show what is holding GPU memory before clearing.
    print(torch.cuda.list_gpu_processes())
    # Collect unreachable Python objects first so any CUDA blocks they
    # held are returned to the caching allocator and become releasable
    # by empty_cache() below.
    gc.collect()
    torch.cuda.empty_cache()


# Run unconditionally so executing (or importing) this file clears the
# cache, matching the original script's behavior.
report_gpu()