mirror of https://github.com/hpcaitech/ColossalAI
[compatibility] used backward-compatible API for global process group (#758)
parent 4ea49cb536
commit 920fe31526
@@ -374,7 +374,7 @@ class ParallelContext(metaclass=SingletonMeta):
         # None will give the default global process group for pytorch dist operations
         ranks = list(range(world_size))
         cpu_group = dist.new_group(ranks, backend='gloo') if dist.get_backend() != 'gloo' else None
-        self._register_dist(rank, world_size, None, cpu_group, ranks, ParallelMode.GLOBAL)
+        self._register_dist(rank, world_size, dist.GroupMember.WORLD, cpu_group, ranks, ParallelMode.GLOBAL)
         self.add_global_rank(ParallelMode.GLOBAL, rank)

     def _register_dist(self, local_rank, world_size, process_group, cpu_group, ranks_in_group, mode):
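Why register `dist.GroupMember.WORLD` rather than `None`: once `init_process_group` has run, `GroupMember.WORLD` holds the default process group object itself, so the value stored by `ParallelContext` is a real group handle that any collective accepts. `None`, by contrast, is only resolved to the default group inside individual `torch.distributed` calls, which some older PyTorch releases did not do consistently across all APIs; that is the apparent motivation for this compatibility fix. A minimal sketch of the distinction, assuming a single-process gloo setup (the rendezvous address, port, and teardown below are illustrative, not part of the commit):

import torch.distributed as dist

# Illustrative single-process initialization; the address and port are
# placeholders for whatever launcher is actually in use.
dist.init_process_group(backend='gloo',
                        init_method='tcp://127.0.0.1:29500',
                        rank=0, world_size=1)

# After init, dist.GroupMember.WORLD is the default process group object, so
# it can be stored in a registry (as ParallelContext does) and later passed
# to any collective as an explicit group handle.
world_group = dist.GroupMember.WORLD
assert dist.get_world_size(group=world_group) == dist.get_world_size()

dist.destroy_process_group()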