[zero] zero init context collect numel of model (#375)

pull/394/head
Jiarui Fang 2022-03-10 16:31:02 +08:00 committed by Frank Lee
parent 1ed7c24c02
commit 6b6002962a
2 changed files with 9 additions and 3 deletions

View File

@@ -100,7 +100,8 @@ class ZeroInitContext(InsertPostInitMethodToModuleSubClasses):
shard_strategy: BaseShardStrategy,
shard_param: bool = False,
shard_grad: bool = False,
rm_torch_payload_on_the_fly=False):
rm_torch_payload_on_the_fly=False,
model_numel_tensor: torch.Tensor = torch.zeros(1, dtype=torch.int)):
super().__init__()
self.convert_fp16 = convert_fp16
self.target_device = target_device
@@ -110,6 +111,7 @@ class ZeroInitContext(InsertPostInitMethodToModuleSubClasses):
# FIXME(jiaruifang) now setting it to True is invalid.
self.rm_torch_payload_on_the_fly = False
self.initialized_param_list = []
self.model_numel_tensor = model_numel_tensor
def _post_context_exec(self):
"""The callback function when the context exits.
@@ -129,6 +131,8 @@ class ZeroInitContext(InsertPostInitMethodToModuleSubClasses):
if hasattr(param, 'col_attr'):
continue
self.model_numel_tensor += param.numel()
target_device = self.target_device
# convert to fp16 if necessary

View File

@@ -23,10 +23,12 @@ def run_dist(rank, world_size, port, init_device):
for get_components_func in non_distributed_component_funcs:
model_builder, _, _, _, _ = get_components_func()
model_numel_tensor = torch.zeros(1, dtype=torch.int)
with ZeroInitContext(convert_fp16=True,
target_device=init_device,
shard_strategy=TensorShardStrategy(),
shard_param=True):
shard_param=True,
model_numel_tensor=model_numel_tensor):
model = model_builder(checkpoint=True)
for param in model.parameters():
@@ -38,7 +40,7 @@ def run_dist(rank, world_size, port, init_device):
print(f'cpu usgae {GLOBAL_MODEL_DATA_TRACER.cpu_usage}')
print(f'cuda usgae {GLOBAL_MODEL_DATA_TRACER.cuda_usage}')
print(f'numel {model_numel_tensor}')
if init_device.type == 'cuda':
assert (GLOBAL_MODEL_DATA_TRACER.cuda_usage > 0)
elif init_device.type == 'cpu':