From 01066152f12f05e3e3f843c180814a1b55262555 Mon Sep 17 00:00:00 2001
From: Nikita Shulga
Date: Thu, 16 Feb 2023 17:22:45 -0800
Subject: [PATCH] Don't use `torch._six` (#2775)

* Don't use `torch._six`

This is a private API which is gone after https://github.com/pytorch/pytorch/pull/94709

* Update common.py
---
 colossalai/utils/common.py              | 2 +-
 colossalai/zero/sharded_optim/_utils.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/colossalai/utils/common.py b/colossalai/utils/common.py
index 2099883fb..e35b29c2a 100644
--- a/colossalai/utils/common.py
+++ b/colossalai/utils/common.py
@@ -11,7 +11,7 @@ from typing import Callable, Dict, List, Optional, Union
 
 import torch
 import torch.distributed as dist
-from torch._six import inf
+from torch import inf
 from torch.nn.parameter import Parameter
 
 from colossalai.constants import IS_TENSOR_PARALLEL, NUM_PARTITIONS, TENSOR_PARALLEL_ATTRIBUTES

diff --git a/colossalai/zero/sharded_optim/_utils.py b/colossalai/zero/sharded_optim/_utils.py
index e67434401..68928b232 100644
--- a/colossalai/zero/sharded_optim/_utils.py
+++ b/colossalai/zero/sharded_optim/_utils.py
@@ -3,7 +3,7 @@ from typing import Optional
 
 import torch
 import torch.distributed as dist
-from torch._six import inf
+from torch import inf
 from torch._utils import _flatten_dense_tensors, _unflatten_dense_tensors
 
 from colossalai.tensor import ColoParameter
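
Note (illustrative, not part of the patch): the bare `from torch import inf` above assumes a PyTorch release where the public `inf` alias exists; on older releases that import raises ImportError. A minimal compatibility sketch, assuming older PyTorch versions still need to be supported, is to guard the import. `torch._six.inf` was simply `math.inf` re-exported, so the stdlib constant is an equivalent fallback:

    try:
        from torch import inf  # public alias in recent PyTorch
    except ImportError:
        # older PyTorch without the public alias: fall back to the
        # stdlib constant, which is what `torch._six.inf` re-exported
        from math import inf

Either path binds the same `float('inf')` value, so downstream comparisons such as `norm_type == inf` in the gradient-clipping utilities behave identically.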