import torch
import torch.distributed as dist

from colossalai.tensor import ColoTensor, ColoTensorSpec
from colossalai.tensor.distspec import _DistSpec, DistPlacementPattern


def robust_broadcast(tensor):
    """Broadcast `tensor` from rank 0 to all ranks, staging CPU tensors on the GPU."""
    with torch.no_grad():
        is_cpu_ten = tensor.device.type == 'cpu'
        if is_cpu_ten:
            # collective backends such as NCCL cannot broadcast CPU tensors,
            # so stage the data on the GPU first
            b_data = tensor.cuda()
        else:
            b_data = tensor

        dist.broadcast(b_data, 0)

        if is_cpu_ten:
            # copy the broadcast result back into the original CPU tensor
            tensor.copy_(b_data)


def gather_tensor(colo_tensor: ColoTensor) -> None:
    """Make colo_tensor replicated so that rank 0 holds the full tensor."""
    if not colo_tensor.is_replicate():
        pg = colo_tensor.get_process_group()
        # for the group which contains rank 0
        if pg.dp_local_rank() == 0:
            old_dist_spec = colo_tensor.dist_spec
            colo_tensor.to_replicate_()
            if dist.get_rank() != 0:
                # only rank 0 keeps the replicated copy; the other ranks restore their shards
                colo_tensor.set_dist_spec(old_dist_spec)

        # synchronize all processes to avoid unexpected problems
        dist.barrier()

    if dist.get_rank() == 0:
        setattr(colo_tensor, 'save_ready', True)    # set saving signature


def scatter_tensor(colo_tensor: ColoTensor, dist_spec: _DistSpec) -> None:
    """Reverse operation of `gather_tensor`: restore `dist_spec` on every rank from rank 0's data."""
    if dist_spec.placement == DistPlacementPattern.REPLICATE:
        # the target layout is replicated, so broadcasting from rank 0 is enough
        robust_broadcast(colo_tensor.data)
    else:
        global_size = colo_tensor.size_global()

        if dist.get_rank() == 0:
            entire_data = colo_tensor.data
        else:
            entire_data = torch.empty(global_size, device=colo_tensor.device)
        robust_broadcast(entire_data)

        if dist.get_rank() == 0:
            colo_tensor.set_dist_spec(dist_spec)
        else:
            # build a replicated ColoTensor from the broadcast data, then shard it to `dist_spec`
            rep_tensor = ColoTensor(
                entire_data, ColoTensorSpec(pg=colo_tensor.get_process_group(), compute_attr=colo_tensor.compute_spec))
            rep_tensor.set_dist_spec(dist_spec)
            with torch.no_grad():
                colo_tensor.data.copy_(rep_tensor.data)

    # synchronize all processes to avoid unexpected problems
    dist.barrier()
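# A minimal usage sketch (illustrative only; `ckpt_tensor` and 'ckpt.pt' are
# hypothetical names, everything else comes from this module): gather a
# ColoTensor to rank 0, save it there, then restore its original layout.
#
#     old_dist_spec = ckpt_tensor.dist_spec
#     gather_tensor(ckpt_tensor)                      # rank 0 now holds the full tensor
#     if dist.get_rank() == 0:
#         torch.save(ckpt_tensor.data, 'ckpt.pt')
#     scatter_tensor(ckpt_tensor, old_dist_spec)      # restore the original distribution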