mirror of https://github.com/hpcaitech/ColossalAI
You cannot select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
23 lines
907 B
23 lines
907 B
import torch.distributed.optim as dist_optim
|
|
import torch.nn as nn
|
|
import torch.optim as optim
|
|
import torchvision.models as tv_models
|
|
import torchvision.datasets as tv_datasets
|
|
from torchvision import transforms
|
|
|
|
from .registry import Registry
|
|
|
|
# Global component registries.
#
# Each Registry maps a string name to an implementation so components can be
# instantiated from config by name. A registry created with
# `third_party_library=[...]` also resolves names against those modules
# (e.g. MODELS falls back to torchvision.models) in addition to anything
# registered locally.

LAYERS = Registry('layers', third_party_library=[nn])
MODELS = Registry('models', third_party_library=[tv_models])
OPTIMIZERS = Registry('optimizers', third_party_library=[optim, dist_optim])
DATASETS = Registry('datasets', third_party_library=[tv_datasets])
DIST_GROUP_INITIALIZER = Registry('dist_group_initializer')
GRADIENT_HANDLER = Registry('gradient_handler')
# NOTE: LOSSES was previously assigned twice; the later assignment (with nn as
# a fallback library) shadowed the earlier bare one. Only the effective
# definition is kept.
LOSSES = Registry('losses', third_party_library=[nn])
HOOKS = Registry('hooks')
TRANSFORMS = Registry('transforms', third_party_library=[transforms])
DATA_SAMPLERS = Registry('data_samplers')
LR_SCHEDULERS = Registry('lr_schedulers')
SCHEDULE = Registry('schedules')