ColossalAI/colossalai/legacy/context/parallel_mode.py

#!/usr/bin/env python
# -*- encoding: utf-8 -*-
from enum import Enum
# parallel modes
class ParallelMode(Enum):
"""This is an enumeration class containing all possible parallel modes."""
3 years ago
GLOBAL = "global"
3 years ago
# common parallel
DATA = "data"
3 years ago
# model parallel - containing tensor and pipeline parallel groups
# this is added to facilitate amp and grad clipping in hybrid parallel
MODEL = "model"
3 years ago
# pipeline parallel
PIPELINE = "pipe"
3 years ago
# containing all ranks in tensor parallel
TENSOR = "tensor"
3 years ago
# sequence parallel
SEQUENCE = "sequence"
SEQUENCE_DP = "sequence_dp"
3 years ago
# 1D Parallel
PARALLEL_1D = "1d"
3 years ago
# 2D parallel
PARALLEL_2D_ROW = "2d_row"
PARALLEL_2D_COL = "2d_col"
3 years ago
# 3D parallel
PARALLEL_3D_INPUT = "3d_input"
PARALLEL_3D_WEIGHT = "3d_weight"
PARALLEL_3D_OUTPUT = "3d_output"
PARALLEL_3D_INPUT_X_WEIGHT = "3d_input_x_weight"
PARALLEL_3D_OUTPUT_X_WEIGHT = "3d_output_x_weight"
3 years ago
# 2.5D parallel
PARALLEL_2P5D_ROW = "2p5d_row"
PARALLEL_2P5D_COL = "2p5d_col"
PARALLEL_2P5D_DEP = "2p5d_dep"
PARALLEL_2P5D_XZ = "2p5d_xz"
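
Because each mode is backed by a plain string, the enum supports iteration and lookup by value out of the box; in the legacy API these members serve as keys identifying process groups (for example, they are passed to the global parallel context to select a group). Below is a minimal sketch of the enum mechanics; the __main__ demo is an illustrative addition, not part of the ColossalAI source:

if __name__ == "__main__":
    # Enumerate every registered mode and the string key it maps to.
    for mode in ParallelMode:
        print(f"{mode.name:<28} -> {mode.value}")

    # Enum lookup by value, e.g. when resolving a mode name from a config.
    assert ParallelMode("2p5d_row") is ParallelMode.PARALLEL_2P5D_ROW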