diff --git a/.compatibility b/.compatibility
index d90a74b58..7ecced624 100644
--- a/.compatibility
+++ b/.compatibility
@@ -1 +1,2 @@
 2.1.0-12.1.0
+2.2.2-12.1.0
diff --git a/colossalai/tensor/d_tensor/layout_converter.py b/colossalai/tensor/d_tensor/layout_converter.py
index c2cf73181..0f0150d90 100644
--- a/colossalai/tensor/d_tensor/layout_converter.py
+++ b/colossalai/tensor/d_tensor/layout_converter.py
@@ -473,7 +473,7 @@ class LayoutConverter(metaclass=SingletonMeta):
         for process_group in used_process_groups:
             try:
                 dist.get_rank(process_group)
-            except RuntimeError as e:
+            except (ValueError, RuntimeError) as e:
                 # If the group is not registered, it means it has been deleted
                 if str(e) == (
                     f"Group {process_group} is not registered, please create group with torch.distributed.new_group API"
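
Note (not part of the patch): the second hunk widens the exception handler because, with the newer PyTorch version added to .compatibility (2.2.2), probing an unregistered process group via dist.get_rank may surface as a ValueError rather than a RuntimeError. Below is a minimal standalone sketch of the same pattern; the helper name group_is_registered is hypothetical and only illustrates the idea.

import torch.distributed as dist

def group_is_registered(process_group) -> bool:
    # `process_group` is assumed to be a handle previously returned by
    # dist.new_group(). Older PyTorch releases raise RuntimeError when the
    # group has been deregistered, while newer ones may raise ValueError,
    # so the probe treats either exception as "group no longer registered".
    try:
        dist.get_rank(process_group)
    except (ValueError, RuntimeError):
        return False
    return True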