#!/usr/bin/env python
# coding: utf-8

import inspect
import types
from typing import Callable, List

import torch
import torch.nn as nn

from colossalai.tensor import ColoParameter, ColoTensor
from colossalai.utils.model.utils import substitute_init_recursively


class LazyInitContext():
    """
    A context to allow for lazy weight initialization of PyTorch modules. It intercepts the tensor
    initialization functions for lazy initialization.

    Note:
        This API is only experimental and subject to future changes.

    Usage:
        with LazyInitContext() as ctx:
            model = nn.Linear(10, 10)
            model.weight.zero_()

        # make sure the weight is a meta tensor
        assert model.weight.is_meta

        # initialize weights
        ctx.lazy_init_parameters(model)

        # make sure the weight is not a meta tensor
        # and initialized correctly
        assert not model.weight.is_meta and torch.all(model.weight == 0)

    Args:
        to_meta (bool): optional, whether to initialize the model with meta tensors, default is True. This
            argument exists for now because some corner cases such as `self.weight = torch.zeros(...)` cannot be captured yet.
        extra_torch_tensor_func (List[str]): extra torch tensor functions related
            to value setting, such as `zero_` and `triu_`. `zero_` is pre-added by default.
    """

    tensor_set_value_func = ['zero_', 'fill_']

    def __init__(self, to_meta: bool = True, extra_torch_tensor_func: List[str] = None):
        # TODO: hijack the torch constructor functions as well
        self._to_meta = to_meta
        self._intercepted_nn_init_func_cache = {}
        self._nn_init_methods = self._get_nn_init_methods()
        self._torch_mod_cls = torch.nn.modules.module.Module

        if extra_torch_tensor_func:
            # use tuple to remove duplicates
            self._torch_tensor_funcs = tuple(self.tensor_set_value_func + extra_torch_tensor_func)
        else:
            self._torch_tensor_funcs = self.tensor_set_value_func

    @property
    def to_meta(self):
        return self._to_meta

    def _cache_init_func(self, func):
        """
        This method wraps the ``torch.nn.init`` methods and torch tensor value-setting functions
        so that the function call is cached instead of being executed.
        """

        def wrapped_init_func(tensor, *args, **kwargs):
            # record the call per tensor; it will be replayed later by ``lazy_init_parameters``
            if tensor not in self._intercepted_nn_init_func_cache:
                self._intercepted_nn_init_func_cache[tensor] = []
            self._intercepted_nn_init_func_cache[tensor].append((func, args, kwargs))

        return wrapped_init_func

    def _get_nn_init_methods(self):
        """
        This method looks for all available functions in the ``torch.nn.init``
        module.
        """
        nn_init_method_names = dir(torch.nn.init)
        nn_init_methods = []

        # look for all methods in the ``torch.nn.init`` module
        for name in nn_init_method_names:
            nn_init_methods.append((name, getattr(torch.nn.init, name)))

        def _is_init_method(item):
            name, func = item

            if (not isinstance(func, types.FunctionType) or name.startswith('_') or not name.endswith('_')):
                return False
            else:
                return True

        # remove methods which are not init functions
        nn_init_methods = list(filter(_is_init_method, nn_init_methods))
        return nn_init_methods

    def _wrap_module_init(self, func):
        """
        This method wraps the calls to the `__init__` of ``torch.nn.Module`` and replaces
        the device argument with the value 'meta' so that all modules are created as meta tensors.
        """
        has_device = 'device' in inspect.signature(func).parameters

        def layer_lazy_init(module, *args, **kwargs):
            # if this module accepts a device argument,
            # we set it to 'meta' so that it is initialized on the meta backend
            if has_device:
                kwargs['device'] = 'meta'
            func(module, *args, **kwargs)

            # if there is no device argument, we initialize the module normally and convert it to meta
            if not has_device:
                module.to('meta')

        return layer_lazy_init

    def _get_tmp_origin_func_ref(self, name):
        """
        Generate a temporary attribute name under which the original function is cached,
        so that it can be retrieved consistently later.
        """
        return f'_orig_{name}'

    def _patch_nn_init_funcs(self):
        # patch nn.init functions
        for name, func in self._nn_init_methods:
            setattr(torch.nn.init, name, self._cache_init_func(func))

    def _unpatch_nn_init_funcs(self):
        # unpatch nn.init functions
        for name, func in self._nn_init_methods:
            setattr(torch.nn.init, name, func)

    def _patch_submodule_init(self):
        # patch classes' __init__ methods

        def _activate_wrap_init(cls):
            cls.__orig_init__ = cls.__init__
            cls.__init__ = self._wrap_module_init(cls.__init__)

        substitute_init_recursively(self._torch_mod_cls, _activate_wrap_init, set())

    def _unpatch_submodule_init(self):

        def _recover_orig_init(cls):
            cls.__init__ = cls.__orig_init__

        substitute_init_recursively(self._torch_mod_cls, _recover_orig_init, set())

    def _patch_torch_tensor_funcs(self):
        # patch tensor value-setting functions
        for func_name in self._torch_tensor_funcs:
            # keep a reference to the original function so that it can be restored on exit
            origin_func_name = self._get_tmp_origin_func_ref(func_name)
            origin_func = getattr(torch.Tensor, func_name)
            setattr(torch.Tensor, origin_func_name, origin_func)
            setattr(torch.Tensor, func_name, self._cache_init_func(origin_func))

    def _unpatch_torch_tensor_funcs(self):
        # restore the original tensor value-setting functions
        for func_name in self._torch_tensor_funcs:
            origin_func_name = self._get_tmp_origin_func_ref(func_name)
            origin_func = getattr(torch.Tensor, origin_func_name)
            setattr(torch.Tensor, func_name, origin_func)

    def __enter__(self):
        self._patch_torch_tensor_funcs()
        self._patch_nn_init_funcs()

        if self._to_meta:
            self._patch_submodule_init()
        return self

    def __exit__(self, *args, **kwargs):
        if self._to_meta:
            self._unpatch_submodule_init()
        self._unpatch_nn_init_funcs()
        self._unpatch_torch_tensor_funcs()

    def lazy_init_parameters(self, model: torch.nn.Module, device='cpu'):
        """
        Initialize the weights of the meta-tensor model.

        Args:
            model (`torch.nn.Module`): the model instantiated under the context.
            device (str): the device on which the weights are initialized.
        """

        def _init_recursively(module: nn.Module):
            # recursively initialize the child modules
            for mod in module.children():
                _init_recursively(mod)

            # initialize and shard tensors directly attached to the current module
            for name, param in module.named_parameters(recurse=False):
                _init_and_shard(module, name, param)

            for name, buf in module.named_buffers(recurse=False):
                _init_and_shard(module, name, buf)

        @torch.no_grad()
        def _init_and_shard(module, name, tensor):
            # check whether the tensor is a buffer or a parameter
            is_param = isinstance(tensor, nn.parameter.Parameter)

            # get the sharding spec attached to the tensor, if any
            dist_spec = getattr(tensor, 'dist_spec', None)
            pg = getattr(tensor, 'pg', None)
            comp_spec = getattr(tensor, 'comp_spec', None)

            # convert the tensor from a meta tensor to a materialized one
            if tensor.is_meta:
                materialized_tensor = torch.empty_like(tensor, device=device)
                # if this tensor is a meta tensor, it must have a cached init function
                assert tensor in self._intercepted_nn_init_func_cache
            else:
                materialized_tensor = tensor

            # apply the last cached init function
            if tensor in self._intercepted_nn_init_func_cache:
                init_func, args, kwargs = self._intercepted_nn_init_func_cache[tensor][-1]
                init_func(materialized_tensor, *args, **kwargs)

            # convert it to a ColoTensor or ColoParameter
            if is_param:
                tensor = ColoParameter.from_torch_tensor(materialized_tensor, requires_grad=tensor.requires_grad)
            else:
                tensor = ColoTensor.from_torch_tensor(materialized_tensor)

            # override the original tensor
            with torch.no_grad():
                setattr(module, name, tensor)

            # apply sharding
            if dist_spec:
                tensor.process_group = pg
                tensor.set_tensor_spec(dist_spec, comp_spec)

        _init_recursively(model)

        return model
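

if __name__ == '__main__':
    # Illustrative sketch only (an addition for clarity, not part of the original module): it mirrors
    # the docstring usage above on a slightly larger model, assuming a single-process CPU run where
    # no distributed spec has been attached to any tensor.
    with LazyInitContext() as ctx:
        # parameters are created on the meta device; init calls are cached instead of executed
        model = nn.Sequential(nn.Linear(8, 16), nn.ReLU(), nn.Linear(16, 4))
        model[0].weight.zero_()

    assert all(p.is_meta for p in model.parameters())

    # replay the cached init functions on real tensors and wrap them as ColoParameter/ColoTensor
    ctx.lazy_init_parameters(model, device='cpu')

    assert not any(p.is_meta for p in model.parameters())
    assert torch.all(model[0].weight == 0)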