import torch

from colossalai.registry import OPHOOKS

from . import BaseOpHook


@OPHOOKS.register_module
class ShardGradMemTracerHook(BaseOpHook):
    """
    A hook that processes sharded parameters before and after the forward
    and backward execution of each operator.
    """

    def __init__(self):
        super().__init__()

    def pre_fwd_exec(self, module: torch.nn.Module, *args):
        pass

    def post_fwd_exec(self, module: torch.nn.Module, *args):
        pass

    def pre_bwd_exec(self, module: torch.nn.Module, input, output):
        # Before an operator's backward pass, every parameter must already
        # carry a sharded-gradient holder; set up its buffer here.
        for param in module.parameters():
            assert hasattr(param, '_sharded_grad')
            param._sharded_grad.setup()

    def post_bwd_exec(self, module: torch.nn.Module, input):
        pass

    def post_iter(self):
        pass
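

# Usage sketch (an illustrative assumption, not part of the original file):
# one plausible way to attach this hook to a model. It assumes the
# package-level helper `register_ophooks_recursively`, and that a sharded
# model wrapper has already attached a `_sharded_grad` object (exposing a
# `setup()` method) to every parameter before the backward pass runs.
#
#   import torch.nn as nn
#   from colossalai.engine.ophooks import register_ophooks_recursively
#
#   model = nn.Linear(16, 16)
#   register_ophooks_recursively(model, [ShardGradMemTracerHook()])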