import torch
from typing import Optional

from colossalai.tensor.colo_tensor import ColoTensor
from colossalai.tensor.const import TensorType
from colossalai.tensor import ColoTensorSpec
from colossalai.tensor.param_op_hook import ParamOpHookManager


def filter_args(func, *args):
    """Return the arguments in ``args`` for which ``func(arg)`` is truthy."""
    return [arg for arg in args if func(arg)]


def replace_args(args, kwargs, new_args):
    """Write ``new_args`` back into the original positional and keyword argument slots."""
    args = new_args[:len(args)]
    for k, v in zip(kwargs.keys(), new_args[len(args):]):
        kwargs[k] = v
    return tuple(args), kwargs

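
# Illustrative sketch of how the two helpers above behave; the values are hypothetical
# and kept as comments so that importing this module stays side-effect free:
#   filter_args(lambda a: isinstance(a, int), 1, 'x', 2)  ->  [1, 2]
#   replace_args((1, 2), {'k': 3}, [10, 20, 30])          ->  ((10, 20), {'k': 30})
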
class ColoParameter(ColoTensor, torch.nn.Parameter):
    r"""A kind of ColoTensor to be considered as a module parameter.

    Args:
        data (torch.Tensor, optional): the tensor holding the parameter data.
            Defaults to ``None``, in which case an empty tensor is used.
        requires_grad (bool, optional): whether autograd should record operations
            on this parameter. Defaults to ``True``.
        spec (ColoTensorSpec, optional): the distributed spec (process group,
            dist spec and compute spec) of the tensor. Defaults to ``None``.
    """

    def __new__(cls,
                data: Optional[torch.Tensor] = None,
                requires_grad: bool = True,
                spec: ColoTensorSpec = None) -> 'ColoParameter':
        if data is None:
            data = torch.empty(0)
        return torch.Tensor._make_subclass(cls, data, requires_grad)

    def __init__(self,
                 data: Optional[torch.Tensor] = None,
                 requires_grad: bool = True,
                 spec: ColoTensorSpec = None) -> None:
        ColoTensor.__init__(self, data, spec)
        self._type = TensorType.MODEL
        # a list of the modules that share this ColoParameter with each other
        self._shared_param_modules = []

    @property
    def shared_param_modules(self):
        return self._shared_param_modules

    @staticmethod
    def from_torch_tensor(tensor: torch.Tensor,
                          requires_grad: bool = True,
                          spec: ColoTensorSpec = None) -> 'ColoParameter':
        tensor = tensor.as_subclass(ColoParameter)
        tensor.__init__(tensor, requires_grad=requires_grad, spec=spec)
        return tensor

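    # Illustrative usage sketch, kept as a comment because it assumes a distributed
    # environment (e.g. one set up via colossalai.launch) so that a valid process
    # group `pg` and ColoTensorSpec can be constructed; the shape is made up:
    #   weight = ColoParameter.from_torch_tensor(torch.randn(16, 16),
    #                                            spec=ColoTensorSpec(pg))
    #   assert isinstance(weight, torch.nn.Parameter) and isinstance(weight, ColoTensor)
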
    def __repr__(self):
        return f'ColoParameter: {ColoTensor.__repr__(self)}'

    @classmethod
    def __torch_function__(cls, func, types, args=..., kwargs=None):
        if ParamOpHookManager.has_hook():
            # Only intercept ordinary torch ops; dunder functions bypass the hooks.
            if not func.__name__.startswith('__'):
                if kwargs is None:
                    kwargs = {}
                params = filter_args(lambda arg: isinstance(arg, ColoParameter), *args, *kwargs.values())
                if len(params) > 0:
                    # Run the registered hooks around the op, with torch function
                    # dispatch disabled so the hooks do not re-enter this method.
                    with torch._C.DisableTorchFunction():
                        new_args = ParamOpHookManager.pre_op(params, *args, *kwargs.values())
                    args, kwargs = replace_args(args, kwargs, new_args)
                    ret = super().__torch_function__(func, types, args, kwargs)
                    with torch._C.DisableTorchFunction():
                        ret = ParamOpHookManager.post_op(params, ret)
                    return ret
        return super().__torch_function__(func, types, args, kwargs)

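    # Illustrative sketch of the dispatch flow above (hypothetical shapes, kept as a
    # comment; `w` stands for an already-constructed ColoParameter of shape (4, 8)):
    #   import torch.nn.functional as F
    #   x = torch.randn(2, 8)
    #   y = F.linear(x, w)    # routed through ColoParameter.__torch_function__; with no
    #                         # ParamOpHook registered it falls through to the default
    #                         # torch behaviour, otherwise pre_op/post_op wrap the call.
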
    def __deepcopy__(self, memo):
        if id(self) in memo:
            return memo[id(self)]
        else:
            with torch._C.DisableTorchFunction():
                data = self.data.clone()
            tensor = ColoParameter(data,
                                   self.requires_grad,
                                   spec=ColoTensorSpec(self.get_process_group(), self.dist_spec, self.compute_spec))
            memo[id(self)] = tensor
            return tensor

    def __reduce_ex__(self, proto):
        # Adapted from torch._utils._rebuild_parameter:
        # def _rebuild_colo_parameter(data, requires_grad, backward_hooks):
        #     colo_param = ColoParameter(data, requires_grad)
        #     colo_param._backward_hooks = backward_hooks
        #     return colo_param

        # return (
        #     _rebuild_colo_parameter,
        #     (self.data, self.requires_grad, OrderedDict())
        # )

        # TODO(jzy): object reflection is not supported yet. The dist spec cannot be
        # pickled or rebuilt because it is tightly coupled to the runtime attribute
        # `process_group`.
        raise NotImplementedError