# Mirror of https://github.com/hpcaitech/ColossalAI
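# Global Registry instances used across ColossalAI: components such as layers,
# models, optimizers, and hooks register themselves here so they can later be
# looked up and instantiated by name (e.g. when a model is built from a config).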
import torch.distributed.optim as dist_optim
import torch.nn as nn
import torch.optim as optim

from .registry import Registry
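# Each Registry is keyed by a category name. Registries constructed with
# `third_party_library=[...]` presumably fall back to those modules when a
# name is not registered directly (e.g. LAYERS can resolve classes from
# torch.nn); see .registry for the exact lookup rules.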
LAYERS = Registry("layers", third_party_library=[nn])
MODELS = Registry("models")
OPTIMIZERS = Registry("optimizers", third_party_library=[optim, dist_optim])
DATASETS = Registry("datasets")
DIST_GROUP_INITIALIZER = Registry("dist_group_initializer")
GRADIENT_HANDLER = Registry("gradient_handler")
LOSSES = Registry("losses", third_party_library=[nn])
HOOKS = Registry("hooks")
TRANSFORMS = Registry("transforms")
DATA_SAMPLERS = Registry("data_samplers")
LR_SCHEDULERS = Registry("lr_schedulers")
SCHEDULE = Registry("schedules")
OPHOOKS = Registry("ophooks")
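
# Usage sketch (assumes Registry provides a `register_module` decorator and a
# `get_module` lookup, as used elsewhere in ColossalAI; check .registry for
# the exact API before relying on these names):
#
#     @LOSSES.register_module
#     class MyLoss(nn.Module):
#         """Custom loss, registered under the name 'MyLoss'."""
#
#         def forward(self, logits, labels):
#             return nn.functional.cross_entropy(logits, labels)
#
#     # Later, e.g. when a trainer builds its criterion from a config:
#     loss_cls = LOSSES.get_module("MyLoss")
#     criterion = loss_cls()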