
Update parallel_context.py (#2408)

pull/2413/head
Haofan Wang authored 2 years ago, committed by GitHub
commit 7d5640b9db
1 changed file with 1 addition and 1 deletion:
    colossalai/context/parallel_context.py
@@ -375,7 +375,7 @@ class ParallelContext(metaclass=SingletonMeta):
         # None will give the default global process group for pytorch dist operations
         ranks = list(range(world_size))
-        cpu_group = dist.new_group(ranks, backend='gloo') if dist.get_backend() != 'gloo' else None
+        cpu_group = dist.new_group(ranks, backend='gloo') if dist.get_backend() == 'gloo' else None
         self._register_dist(rank, world_size, dist.GroupMember.WORLD, cpu_group, ranks, ParallelMode.GLOBAL)
         self.add_global_rank(ParallelMode.GLOBAL, rank)

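For context, here is a minimal standalone sketch (not taken from the ColossalAI source) of the pattern around the changed line: a default process group plus an optional gloo-backed CPU group. The torchrun launch, the 'gloo' default backend, and the init_global_group name are assumptions for illustration; only the cpu_group conditional mirrors the diff above.

import torch.distributed as dist


def init_global_group() -> None:
    # Default global process group; 'gloo' is assumed here so the
    # sketch runs on CPU-only machines (real deployments often use 'nccl').
    dist.init_process_group(backend="gloo")

    world_size = dist.get_world_size()
    ranks = list(range(world_size))

    # The condition touched by the commit: after the change, a separate
    # gloo-backed CPU group is created only when the default backend is
    # already 'gloo'; otherwise cpu_group stays None and callers fall
    # back to the default global group (dist.GroupMember.WORLD).
    cpu_group = dist.new_group(ranks, backend="gloo") if dist.get_backend() == "gloo" else None

    group = cpu_group if cpu_group is not None else dist.GroupMember.WORLD
    print(f"rank {dist.get_rank()}/{world_size} -> group backend: {dist.get_backend(group)}")

    dist.destroy_process_group()


if __name__ == "__main__":
    init_global_group()

Launched with, for example, torchrun --nproc_per_node=2 sketch.py, each rank reports the backend of the group it would use for CPU-side collectives.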