#!/usr/bin/env python
# -*- encoding: utf-8 -*-

from abc import ABC, abstractmethod

from colossalai.context import Config


class ProcessGroupInitializer(ABC):
    """An object that, given the parallelism configuration, initializes the parallel process groups.

    Args:
        rank (int): The rank of the current process.
        world_size (int): Size of the whole communication world.
        config (Config): Running configuration.
        data_parallel_size (int): Size of data parallelism.
        pipeline_parallel_size (int): Size of pipeline parallelism.
        tensor_parallel_size (int): Size of tensor parallelism.
    """

    def __init__(self, rank: int, world_size: int, config: Config, data_parallel_size: int,
                 pipeline_parallel_size: int, tensor_parallel_size: int):
        super().__init__()
        self.rank = rank
        self.world_size = world_size
        self.config = config
        self.data_parallel_size = data_parallel_size
        self.pipeline_parallel_size = pipeline_parallel_size
        self.tensor_parallel_size = tensor_parallel_size

    @abstractmethod
    def init_dist_group(self):
        """Initialize the process groups described by this initializer's configuration."""
        pass
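
# --------------------------------------------------------------------------
# Illustrative sketch only, not part of the upstream file: a minimal example
# of how a concrete subclass might implement `init_dist_group`. The class
# name `Initializer_DataSketch`, the strided rank layout, and the returned
# tuple are hypothetical assumptions; the only external call used,
# `torch.distributed.new_group`, is a real PyTorch API.
import torch.distributed as dist


class Initializer_DataSketch(ProcessGroupInitializer):
    """Hypothetical example: build one process group per data-parallel set.

    Assumes a strided layout in which ranks ``i, i + num_groups, ...``
    (with ``num_groups = world_size // data_parallel_size``) replicate the
    same model shard and therefore share a data-parallel group.
    """

    def init_dist_group(self):
        num_groups = self.world_size // self.data_parallel_size
        local_group, local_ranks = None, None
        for i in range(num_groups):
            # Assumed layout: each group holds `data_parallel_size` ranks
            # spaced `num_groups` apart.
            ranks = [i + j * num_groups for j in range(self.data_parallel_size)]
            # `dist.new_group` is collective: every rank must call it for
            # every group, including groups it does not belong to.
            group = dist.new_group(ranks)
            if self.rank in ranks:
                local_group, local_ranks = group, ranks
        # Hypothetical return convention: the group containing this rank
        # and the ranks that make it up.
        return local_group, local_ranks

# Usage sketch: after `dist.init_process_group(...)`, each rank constructs
# the initializer with identical arguments and calls `init_dist_group()`,
# so the collective `new_group` calls line up across all ranks.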