[compatibility] used backward-compatible API for global process group (#758)

pull/770/head
Frank Lee 2022-04-14 17:20:35 +08:00 committed by GitHub
parent 4ea49cb536
commit 920fe31526
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
1 changed file with 1 addition and 1 deletion

View File

@@ -374,7 +374,7 @@ class ParallelContext(metaclass=SingletonMeta):
# None will give the default global process group for pytorch dist operations
ranks = list(range(world_size))
cpu_group = dist.new_group(ranks, backend='gloo') if dist.get_backend() != 'gloo' else None
self._register_dist(rank, world_size, None, cpu_group, ranks, ParallelMode.GLOBAL)
self._register_dist(rank, world_size, dist.GroupMember.WORLD, cpu_group, ranks, ParallelMode.GLOBAL)
self.add_global_rank(ParallelMode.GLOBAL, rank)
def _register_dist(self, local_rank, world_size, process_group, cpu_group, ranks_in_group, mode):