Browse Source

Don't use `torch._six` (#2775)

* Don't use `torch._six`

This is a private API which is gone after https://github.com/pytorch/pytorch/pull/94709

* Update common.py
pull/2777/head
Nikita Shulga 2 years ago committed by GitHub
parent
commit
01066152f1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 2
      colossalai/utils/common.py
  2. 2
      colossalai/zero/sharded_optim/_utils.py

2
colossalai/utils/common.py

@@ -11,7 +11,7 @@ from typing import Callable, Dict, List, Optional, Union
import torch
import torch.distributed as dist
from torch._six import inf
from torch import inf
from torch.nn.parameter import Parameter
from colossalai.constants import IS_TENSOR_PARALLEL, NUM_PARTITIONS, TENSOR_PARALLEL_ATTRIBUTES

2
colossalai/zero/sharded_optim/_utils.py

@@ -3,7 +3,7 @@ from typing import Optional
import torch
import torch.distributed as dist
from torch._six import inf
from torch import inf
from torch._utils import _flatten_dense_tensors, _unflatten_dense_tensors
from colossalai.tensor import ColoParameter

Loading…
Cancel
Save