reset print memory

pull/456/head
yingtongxiong 2023-10-25 16:48:02 +08:00
parent 985465c96a
commit cc20fa271a
2 changed files with 1 addition and 4 deletions

@@ -316,8 +316,7 @@ class FSTPOverlapSchedulerHook(SchedulerHook):
         self._overlap_handler.set_forward_mode(True)
 
     def after_forward(self, scheduler, outputs) -> None:
-        print("after forward allocated memory: ", torch.cuda.memory_allocated() / 1024 / 1024 /1024, flush=True)
-        print("after forward max memory: ", torch.cuda.max_memory_allocated() / 1024 / 1024 / 1024, flush=True)
+        pass
 
     def before_criterion(self, scheduler, outputs, label) -> None:
         pass

@@ -255,8 +255,6 @@ def main(args):
         # update parameters, and returns (success_update, grad_norm)
         trainer_result = trainer.step()
         assert trainer_result is not None
-        print("after step: ", torch.cuda.memory_allocated() / 1024 / 1024 /1024, flush=True)
-        print("after step: ", torch.cuda.max_memory_allocated() / 1024 / 1024 / 1024, flush=True)
         success_update, grad_norm_groups = trainer_result
 
         if success_update:  # update parameters successfully
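
For reference, both removed call sites follow the same pattern: torch.cuda.memory_allocated() and torch.cuda.max_memory_allocated() return bytes, so dividing by 1024 three times (1024**3) converts to GiB. A minimal sketch of an equivalent reusable helper, should the instrumentation be needed again (report_cuda_memory is a hypothetical name, not part of this patch):

import torch

def report_cuda_memory(tag: str) -> None:
    # Hypothetical helper: convert bytes reported by the CUDA
    # caching allocator to GiB and print current and peak usage.
    gib = 1024 ** 3  # bytes per GiB
    print(f"{tag} allocated memory: {torch.cuda.memory_allocated() / gib:.2f} GiB", flush=True)
    print(f"{tag} max memory: {torch.cuda.max_memory_allocated() / gib:.2f} GiB", flush=True)

Calling torch.cuda.reset_peak_memory_stats() between measurements makes max_memory_allocated() report the peak of each phase (e.g. forward vs. optimizer step) rather than the whole run.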