diff --git a/colossalai/nn/parallel/data_parallel.py b/colossalai/nn/parallel/data_parallel.py
index f88534bc3..9b0e88ea8 100644
--- a/colossalai/nn/parallel/data_parallel.py
+++ b/colossalai/nn/parallel/data_parallel.py
@@ -181,7 +181,7 @@ class ColoDDPV2(ColoDDP):
     def _post_backward(self):
         self.chunk_manager.exec_lazy_release()
         self._setup_grads_ptr()
-        self._logger.info(
+        self._logger.debug(
             f'layout time: {self.gemini_manager._layout_time}, evict time: {self.gemini_manager._evict_time}, PCIE move vol: {self.gemini_manager._cpu_gpu_move_volume}B'
         )
         self.gemini_manager.post_iter()
diff --git a/colossalai/zero/utils/zero_hook.py b/colossalai/zero/utils/zero_hook.py
index a5e621db0..e29266021 100644
--- a/colossalai/zero/utils/zero_hook.py
+++ b/colossalai/zero/utils/zero_hook.py
@@ -113,7 +113,7 @@ class ZeroHook(BaseOpHook):

     def post_iter(self):
         if self._stateful_tensor_mgr:
-            self.logger.info(
+            self.logger.debug(
                 f"CPU-GPU data moving this iteration {self._stateful_tensor_mgr.cpu_gpu_move_volume/1e9} GB, get layout info time: {self._stateful_tensor_mgr._layout_time}, evict cpu time: {self._stateful_tensor_mgr._evict_time}",
                 ranks=[0])
             self._stateful_tensor_mgr.finish_iter()