You cannot select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
12 lines
283 B
12 lines
283 B
5 years ago
|
import torch.distributed as dist
|
||
|
from detectron2.utils.comm import get_world_size
|
||
|
|
||
|
|
||
|
def reduce_sum(tensor):
    """Element-wise sum of `tensor` across all distributed workers.

    When not running in a distributed setting (world size below 2) the
    input tensor is returned unchanged. Otherwise a clone is reduced in
    place via all_reduce so the caller's tensor is never mutated, and
    that clone — holding the sum over every process — is returned.
    """
    if get_world_size() < 2:
        # Single-process run: nothing to reduce across.
        return tensor
    # Clone first: dist.all_reduce operates in place.
    summed = tensor.clone()
    dist.all_reduce(summed, op=dist.ReduceOp.SUM)
    return summed
|