diff --git a/colossalai/engine/gradient_handler/_base_gradient_handler.py b/colossalai/engine/gradient_handler/_base_gradient_handler.py
index 0c1309898..c21235986 100644
--- a/colossalai/engine/gradient_handler/_base_gradient_handler.py
+++ b/colossalai/engine/gradient_handler/_base_gradient_handler.py
@@ -12,6 +12,7 @@ class BaseGradientHandler(ABC):
         model (Module): Model where the gradients accumulate.
         optimizer (Optimizer): Optimizer for updating the parameters.
     """
+
     def __init__(self, model, optimizer):
         self._model = model
         self._optimizer = optimizer
diff --git a/colossalai/engine/ophooks/_base_ophook.py b/colossalai/engine/ophooks/_base_ophook.py
index e948a8cfb..24251141d 100644
--- a/colossalai/engine/ophooks/_base_ophook.py
+++ b/colossalai/engine/ophooks/_base_ophook.py
@@ -5,6 +5,7 @@ import torch
 
 
 class BaseOpHook(ABC):
     """This class allows users to add customized operations before and after the execution of a PyTorch submodule"""
+
     def __init__(self):
         pass