import torch.distributed as dist
from torch.distributed import ProcessGroup


class BaseStore:
    """Base class that caches the world size and rank of the given process group."""

    def __init__(self, torch_pg: ProcessGroup):
        # Query the collective once at construction time and cache the results.
        self._world_size = dist.get_world_size(group=torch_pg)
        self._local_rank = dist.get_rank(group=torch_pg)

    @property
    def world_size(self):
        return self._world_size

    @property
    def local_rank(self):
        return self._local_rank
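

# --------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the original file): BaseStore only
# caches the world size and rank of the process group it is handed, so it can
# be exercised with any initialized torch.distributed group. The one-process
# gloo setup below is an assumption made purely so the example runs standalone.
# --------------------------------------------------------------------------
if __name__ == "__main__":
    import os

    # Minimal rendezvous settings for a single local process.
    os.environ.setdefault("MASTER_ADDR", "127.0.0.1")
    os.environ.setdefault("MASTER_PORT", "29500")
    dist.init_process_group(backend="gloo", rank=0, world_size=1)

    # Wrap the default (WORLD) process group in a BaseStore.
    store = BaseStore(dist.group.WORLD)
    print(f"rank {store.local_rank} of {store.world_size}")  # -> "rank 0 of 1"

    dist.destroy_process_group()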