File size: 1,684 Bytes
44ddffd |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 |
import io
import os
import torch
import torch.distributed as dist
# Keep a handle on the builtin print before it is shadowed by the
# rank-aware `print` defined below.
_print = print
def get_world_size():
    """Return the distributed world size from the environment (default 1)."""
    return int(os.environ.get('WORLD_SIZE', 1))
def get_rank():
    """Return this process's global rank from the environment (default 0)."""
    return int(os.environ.get('RANK', 0))
def get_local_rank():
    """Return this process's rank within its node from the environment (default 0)."""
    return int(os.environ.get('LOCAL_RANK', 0))
def is_dist():
    """True when torch.distributed is usable, initialized, and spans >1 process."""
    if not dist.is_available():
        return False
    if not dist.is_initialized():
        return False
    return get_world_size() > 1
def print(*args, all=False, **kwargs):
    """Rank-aware drop-in replacement for the builtin ``print``.

    Outside distributed mode this behaves exactly like the builtin. In
    distributed mode, output is suppressed on every process whose local
    rank is not 0 unless ``all=True``, and each message is prefixed with
    the process's global rank.

    Args:
        *args: positional values, as for the builtin ``print``.
        all: when True, every rank prints, not just local rank 0.
        **kwargs: forwarded to the builtin ``print``. ``sep`` applies to
            the message body; ``end``, ``file`` and ``flush`` apply to
            the final prefixed output.
    """
    if not is_dist():
        _print(*args, **kwargs)
        return
    if not all and get_local_rank() != 0:
        return
    # Fix: the caller's end/file/flush used to be silently discarded in
    # the distributed branch (overwritten for the capture pass, then
    # defaults used on the real output). Pull them out and honor them.
    end = kwargs.pop('end', '\n')
    file = kwargs.pop('file', None)
    flush = kwargs.pop('flush', False)
    # Render the message body into a buffer so we can prepend the rank.
    with io.StringIO() as buffer:
        _print(*args, end='', file=buffer, **kwargs)
        body = buffer.getvalue()
    message = '[rank {}] {}'.format(dist.get_rank(), body)
    # The builtin treats file=None as sys.stdout, so forward directly.
    _print(message, end=end, file=file, flush=flush)
def reduce_mean(tensor, nprocs=None):
    """All-reduce *tensor* across processes and divide by the process count.

    Scalars (non-tensors) are wrapped into a tensor for the reduce and
    returned as a plain Python number. Outside distributed mode the
    input is returned unchanged.

    Args:
        tensor: a ``torch.Tensor`` or a plain number.
        nprocs: divisor for the mean; falls back to the world size when
            falsy.
    """
    if not is_dist():
        return tensor
    was_tensor = isinstance(tensor, torch.Tensor)
    if was_tensor:
        reduced = tensor.clone()
    else:
        # NOTE(review): assumes an active CUDA device -- confirm for
        # CPU-only backends such as gloo.
        reduced = torch.tensor(tensor, device=torch.cuda.current_device())
    dist.all_reduce(reduced, op=dist.ReduceOp.SUM)
    divisor = nprocs if nprocs else dist.get_world_size()
    reduced = reduced / divisor
    return reduced if was_tensor else reduced.item()
def reduce_sum(tensor):
    """All-reduce *tensor* across processes with a SUM op.

    Scalars (non-tensors) are wrapped into a tensor for the reduce and
    returned as a plain Python number. Outside distributed mode the
    input is returned unchanged.
    """
    if not is_dist():
        return tensor
    was_tensor = isinstance(tensor, torch.Tensor)
    if was_tensor:
        total = tensor.clone()
    else:
        # NOTE(review): assumes an active CUDA device -- confirm for
        # CPU-only backends such as gloo.
        total = torch.tensor(tensor, device=torch.cuda.current_device())
    dist.all_reduce(total, op=dist.ReduceOp.SUM)
    return total if was_tensor else total.item()
def barrier():
    """Block until every process reaches this point; no-op when not distributed."""
    if is_dist():
        dist.barrier()