You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
ColossalAI/colossalai/nn/layer/moe/__init__.py

8 lines
238 B

from ._operation import AllToAll
# PEP 8: use parentheses for multi-line imports instead of backslash continuation.
from .layers import (
    Experts,
    MoeLayer,
    NormalNoiseGenerator,
    Top1Router,
    Top2Router,
)
__all__ = [
'AllToAll', 'Experts', 'Top1Router', 'Top2Router',
'MoeLayer', 'NormalNoiseGenerator'
]