mirror of https://github.com/hpcaitech/ColossalAI
import torch.distributed as dist
from torch.distributed import ProcessGroup


class BaseStore:
    """Base class for bookkeeping stores; caches the size of a process group
    and this process's rank within it."""

    def __init__(self, torch_pg: ProcessGroup):
        # Cache the group's size and this process's rank so later lookups
        # do not need to query torch.distributed again.
        self._world_size = dist.get_world_size(group=torch_pg)
        self._local_rank = dist.get_rank(group=torch_pg)

    @property
    def world_size(self):
        return self._world_size

    @property
    def local_rank(self):
        return self._local_rank
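

# A minimal usage sketch (an assumption for illustration, not part of the
# repository file): it initializes a single-process gloo group so the
# cached properties can be read without a real multi-GPU launch. The
# address/port values below are illustrative defaults.
if __name__ == "__main__":
    import os

    os.environ.setdefault("MASTER_ADDR", "127.0.0.1")
    os.environ.setdefault("MASTER_PORT", "29500")
    dist.init_process_group(backend="gloo", rank=0, world_size=1)

    store = BaseStore(dist.group.WORLD)
    print(store.world_size)  # 1 in this single-process group
    print(store.local_rank)  # 0

    dist.destroy_process_group()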