# mirror of https://github.com/hpcaitech/ColossalAI
import torch.nn as nn


class ModelWrapper(nn.Module):
    """
    A wrapper class that defines the common interface used by the booster.

    Args:
        module (nn.Module): The model to be wrapped.
    """

    def __init__(self, module: nn.Module) -> None:
        super().__init__()
        self.module = module

    def unwrap(self):
        """
        Unwrap the model to return the original model for checkpoint saving/loading.
        """
        if isinstance(self.module, ModelWrapper):
            return self.module.unwrap()
        return self.module

    def forward(self, *args, **kwargs):
        return self.module(*args, **kwargs)
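

# Illustrative sketch (not part of the upstream file): wrappers may nest, e.g.
# a pipeline wrapper around a DDP wrapper, and ``unwrap`` recurses until it
# reaches the raw ``nn.Module``. ``_demo_nested_unwrap`` is a hypothetical
# helper added only to demonstrate that behaviour.
def _demo_nested_unwrap() -> nn.Module:
    model = nn.Linear(4, 2)
    wrapped = ModelWrapper(ModelWrapper(model))
    # unwrap() strips every wrapper layer and returns the innermost module,
    # which is what checkpoint saving/loading should operate on.
    assert wrapped.unwrap() is model
    return wrapped.unwrap()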


class AMPModelMixin:
    """This mixin class defines the interface for AMP training."""

    def update_master_params(self):
        """
        Update the master parameters for AMP training.
        """
        pass
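

# Minimal usage sketch (an assumption for illustration, not ColossalAI's actual
# plugin code): a plugin-specific wrapper combines ModelWrapper with
# AMPModelMixin and overrides ``update_master_params`` to sync full-precision
# master copies with the working parameters. ``_DemoAMPWrapper`` and
# ``master_params`` are hypothetical names; a real AMP wrapper would keep the
# working copy in fp16/bf16, which is omitted here so the demo runs anywhere.
import torch


class _DemoAMPWrapper(ModelWrapper, AMPModelMixin):
    def __init__(self, module: nn.Module) -> None:
        super().__init__(module)
        # Full-precision master copies of every working parameter.
        self.master_params = [p.detach().clone().float() for p in module.parameters()]

    def update_master_params(self):
        # Copy the (possibly low-precision) working parameters back into the
        # fp32 masters, typically after each optimizer step.
        with torch.no_grad():
            for master, working in zip(self.master_params, self.module.parameters()):
                master.copy_(working.float())


if __name__ == "__main__":
    wrapper = _DemoAMPWrapper(nn.Linear(4, 2))
    out = wrapper(torch.randn(1, 4))  # forward() delegates to the wrapped module
    wrapper.update_master_params()    # refresh the fp32 master copies
    print(out.shape, type(wrapper.unwrap()))  # raw nn.Linear for checkpoint I/O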