From f2d9b63545b73b81941444fded7dc4f714057688 Mon Sep 17 00:00:00 2001 From: YWMditto <862779238@qq.com> Date: Fri, 3 Nov 2023 16:46:14 +0800 Subject: [PATCH] support dynamic ntk in transformers --- tools/transformers/internlm_model/modeling_internlm.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/tools/transformers/internlm_model/modeling_internlm.py b/tools/transformers/internlm_model/modeling_internlm.py index 2ef896d..a850b26 100644 --- a/tools/transformers/internlm_model/modeling_internlm.py +++ b/tools/transformers/internlm_model/modeling_internlm.py @@ -137,14 +137,7 @@ class InternLMRotaryEmbedding(torch.nn.Module): class InternLMDynamicNTKScalingRotaryEmbedding(torch.nn.Module): - """实现dynamic ntk rope; - - 需要保证: - 1. 长度小于 seq len 时能够不断地复用; - 2. 长度超过 seq len 时,每一个 新的token,都需要一个新的base; - - Args: - InternLMRotaryEmbedding (_type_): _description_ + """Implement dynamic NTK RoPE. """ def __init__(self, dim, max_position_embeddings=2048, base=10000, device=None, scaling_factor=1.0):