You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
ColossalAI/colossalai/cluster/__init__.py

6 lines
227 B

# Package entry point for the cluster subpackage: re-exports the three
# cluster-management classes so callers can import them directly from
# this package rather than from the individual submodules.
from .device_mesh_manager import DeviceMeshManager
from .dist_coordinator import DistCoordinator
from .process_group_manager import ProcessGroupManager

# Explicit public API of this package (controls `from <pkg> import *`).
__all__ = ['DistCoordinator', 'ProcessGroupManager', 'DeviceMeshManager']