# Mirrored from https://github.com/hpcaitech/ColossalAI
import torch.distributed.optim as dist_optim
import torch.nn as nn
import torch.optim as optim
import torchvision.models as tv_models
from torchvision.transforms import transforms

from .registry import Registry
# Global registries for ColossalAI components. A registry created with
# `third_party_library` can also resolve names from the listed modules,
# in addition to components registered directly with it.
LAYERS = Registry('layers', third_party_library=[nn])
MODELS = Registry('models', third_party_library=[tv_models])
OPTIMIZERS = Registry('optimizers', third_party_library=[optim, dist_optim])
OPTIMIZER_WRAPPERS = Registry('optimizer_wrappers')
DATASETS = Registry('datasets')
DIST_GROUP_INITIALIZER = Registry('dist_group_initializer')
GRADIENT_HANDLER = Registry('gradient_handler')
LOSSES = Registry('losses', third_party_library=[nn])
HOOKS = Registry('hooks')
TRANSFORMS = Registry('transforms', third_party_library=[transforms])
PIPE_ALLOC_POLICY = Registry('pipeline_allocation_policy')
SAMPLERS = Registry('samplers')
LR_SCHEDULERS = Registry('lr_schedulers')
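# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of this module): components are
# typically registered with a decorator and later looked up by name. The
# `register_module` / `get_module` method names below are assumptions about
# the Registry API defined in `.registry`.
#
#     from colossalai.registry import LOSSES
#
#     @LOSSES.register_module          # hypothetical decorator-style registration
#     class MyLoss(nn.Module):
#         def forward(self, logits, labels):
#             ...
#
#     loss_cls = LOSSES.get_module('MyLoss')   # look up the registered class
#     criterion = loss_cls()
# ---------------------------------------------------------------------------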