From 99d9713b02664a51861e8ece23b974f5428c4f3e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E3=82=A2=E3=83=9E=E3=83=87=E3=82=A6=E3=82=B9?=
Date: Thu, 19 Jan 2023 12:23:03 +0800
Subject: [PATCH] Revert "Update parallel_context.py (#2408)"

This reverts commit 7d5640b9db01b501e95b66e91be9fe27b58d2e58.
---
 colossalai/context/parallel_context.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/colossalai/context/parallel_context.py b/colossalai/context/parallel_context.py
index b7338b53d..dd12dad6d 100644
--- a/colossalai/context/parallel_context.py
+++ b/colossalai/context/parallel_context.py
@@ -375,7 +375,7 @@ class ParallelContext(metaclass=SingletonMeta):
         # None will give the default global process group for pytorch dist operations
         ranks = list(range(world_size))
-        cpu_group = dist.new_group(ranks, backend='gloo') if dist.get_backend() == 'gloo' else None
+        cpu_group = dist.new_group(ranks, backend='gloo') if dist.get_backend() != 'gloo' else None
         self._register_dist(rank, world_size, dist.GroupMember.WORLD, cpu_group, ranks, ParallelMode.GLOBAL)
         self.add_global_rank(ParallelMode.GLOBAL, rank)
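
For reviewers, a minimal standalone sketch of the restored condition (the
helper name build_cpu_group is hypothetical, not ColossalAI API; it assumes
the common case of an nccl default backend on GPU clusters):

    # Sketch of the reverted logic: a separate gloo group for CPU tensors
    # is only needed when the default backend cannot handle them (e.g.
    # 'nccl', which is GPU-only). If the default backend is already 'gloo',
    # returning None lets the default WORLD group serve CPU operations.
    import torch.distributed as dist

    def build_cpu_group(world_size: int):
        ranks = list(range(world_size))
        if dist.get_backend() != 'gloo':
            # GPU-only default backend: create a gloo group for CPU tensors.
            return dist.new_group(ranks, backend='gloo')
        # Default backend is already gloo: None selects the default group.
        return None

With the `==` comparison introduced in #2408, the gloo group was created
only when one already existed and was skipped exactly when it was needed,
which is why this revert flips the comparison back to `!=`.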