from .checkpoint import MoECheckpointIO
from .experts import MLPExperts
from .layers import SparseMLP, apply_load_balance
from .manager import MOE_MANAGER
from .routers import MoeRouter, Top1Router, Top2Router, TopKRouter
from .utils import NormalNoiseGenerator, UniformNoiseGenerator

__all__ = [
    "MLPExperts",
    "MoeRouter",
    "Top1Router",
    "Top2Router",
    "TopKRouter",
    "NormalNoiseGenerator",
    "UniformNoiseGenerator",
    "SparseMLP",
    "MoECheckpointIO",
    "MOE_MANAGER",
    "apply_load_balance",
]
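A minimal usage sketch, assuming this file is the package initializer (the colossalai.moe import path is an assumption, not stated in the source): downstream code imports the re-exported MoE building blocks directly from the package rather than from the individual submodules.

# Hypothetical usage; assumes this file lives at colossalai/moe/__init__.py.
# Only names listed in __all__ above are imported here.
from colossalai.moe import MOE_MANAGER, SparseMLP, Top2Router, apply_load_balance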