mirror of https://github.com/hpcaitech/ColossalAI
fix
parent 703bb5c18d
commit e1e86f9f1f
@@ -484,12 +484,7 @@ class RingAttention(torch.autograd.Function):
         start = i * num_ring_size
         end = (i + 1) * num_ring_size
         for idx in range(start, end):
-            inner_rank = []
-            for k in range(inner_ring_size):
-                current_num = idx + k * tp_size
-                if current_num >= end:
-                    break
-                inner_rank.append(current_num)
+            inner_rank = [idx + k * tp_size for k in range(inner_ring_size) if idx + k * tp_size < end]
             if len(inner_rank) == inner_ring_size and inner_rank not in ranks:
                 ranks.append(inner_rank)
                 group = dist.new_group(inner_rank)
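
For context, the hunk collapses the explicit inner loop that built each inner ring's rank list into a single list comprehension. The two forms agree whenever tp_size > 0, since the candidate ranks idx + k * tp_size are then strictly increasing in k, so breaking at the ring boundary filters exactly the same elements as the comprehension's condition. Below is a minimal, self-contained sketch of that equivalence and of the resulting grouping; the concrete sizes and the outer loop over rings are assumptions for illustration, not values taken from the diff:

    # Standalone sketch of the rank-grouping logic touched by this hunk.
    # All sizes below are hypothetical; the real values come from the
    # RingAttention setup code that the diff does not show.

    def inner_rank_old(idx, end, tp_size, inner_ring_size):
        # Pre-patch version: explicit loop with an early break at the boundary.
        inner_rank = []
        for k in range(inner_ring_size):
            current_num = idx + k * tp_size
            if current_num >= end:
                break
            inner_rank.append(current_num)
        return inner_rank

    def inner_rank_new(idx, end, tp_size, inner_ring_size):
        # Post-patch version: the same candidates, filtered in a comprehension.
        return [idx + k * tp_size for k in range(inner_ring_size) if idx + k * tp_size < end]

    tp_size, inner_ring_size, num_ring_size, world_size = 2, 2, 4, 8  # assumed
    ranks = []
    for i in range(world_size // num_ring_size):  # assumed loop over double rings
        start = i * num_ring_size
        end = (i + 1) * num_ring_size
        for idx in range(start, end):
            old = inner_rank_old(idx, end, tp_size, inner_ring_size)
            new = inner_rank_new(idx, end, tp_size, inner_ring_size)
            assert old == new  # break-based and filter-based forms agree
            if len(new) == inner_ring_size and new not in ranks:
                ranks.append(new)  # ColossalAI also calls dist.new_group(...) here

    print(ranks)  # [[0, 2], [1, 3], [4, 6], [5, 7]]

With these assumed sizes the sketch prints [[0, 2], [1, 3], [4, 6], [5, 7]]: each inner ring collects ranks spaced tp_size apart within one double ring, and any incomplete group at a ring boundary is discarded by the length check.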