pull/6071/head
wangbluo 2024-09-25 19:00:38 +08:00
parent 6fb1322db1
commit 91ed32c256
3 changed files with 3 additions and 3 deletions

@@ -500,8 +500,8 @@ class RingAttention(torch.autograd.Function):
         k,
         v,
         sp_group,
-        tp_group: Optional[dist.ProcessGroup],
         attention_mask_type,
+        tp_group=None,
         cu_seqlens=None,
         max_seqlen=None,
         valid_indices=None,

@@ -866,7 +866,7 @@ def get_gpt2_flash_attention_forward(shard_config: Optional[ShardConfig] = None)
             key,
             value,
             sp_group,
-            tp_group,
+            tp_group=tp_group,
             **attention_mask,
             dropout_p=dropout_p,
             scale=scale,

@@ -571,7 +571,7 @@ def get_llama_flash_attention_forward(shard_config: ShardConfig, sp_mode=None, s
             key_states,
             value_states,
             sp_group,
-            tp_group,
+            tp_group=tp_group,
             **attention_mask,
             inner_ring_size=shard_config.inner_ring_size,
         )
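
Taken together, the three hunks make tp_group a keyword argument with a None default in RingAttention.attention and update both call sites (the gpt2 and llama flash-attention forwards) to pass it by name. A minimal sketch of why the call sites must change; the class body and argument values below are hypothetical stand-ins, not the real implementation:

    # Sketch of the post-commit signature: tp_group now follows
    # attention_mask_type and defaults to None.
    class RingAttentionSketch:
        @staticmethod
        def attention(
            q,
            k,
            v,
            sp_group,
            attention_mask_type,
            tp_group=None,  # moved after attention_mask_type, default None
            cu_seqlens=None,
            max_seqlen=None,
            valid_indices=None,
        ):
            return {"tp_group": tp_group, "mask_type": attention_mask_type}

    # Caller style after the commit, as in the gpt2/llama forwards: tp_group
    # is passed by keyword. Passing it positionally in its old slot (right
    # after sp_group) would now bind it to attention_mask_type instead.
    out = RingAttentionSketch.attention(
        "q", "k", "v",
        "sp_group_placeholder",
        "causal",
        tp_group="tp_group_placeholder",
    )
    assert out == {"tp_group": "tp_group_placeholder", "mask_type": "causal"}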