[test] fix chatglm test kit (#5793)

pull/5797/head^2
Hongxin Liu 6 months ago committed by GitHub
parent 74f4a29734
commit 587bbf4c6d
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

@@ -33,22 +33,6 @@ loss_fn_for_chatglm_model = lambda x: torch.nn.functional.mse_loss(
)
loss_fn = lambda x: x["loss"]
# Miniature ChatGLM2-6B configuration for tests: the architecture is shrunk
# (2 layers, 64-dim hidden, 8 heads) so the model can be built from config
# alone, without downloading checkpoint weights.
# NOTE(review): padded_vocab_size stays at the full 65024 — presumably so
# real tokenizer ids remain in range; confirm against the test inputs.
config = AutoConfig.from_pretrained(
    "THUDM/chatglm2-6b",
    trust_remote_code=True,  # ChatGLM's config/model classes live in the hub repo
    num_layers=2,
    padded_vocab_size=65024,
    hidden_size=64,
    ffn_hidden_size=214,
    num_attention_heads=8,
    kv_channels=16,
    rmsnorm=True,
    original_rope=True,
    use_cache=True,
    multi_query_attention=False,
    torch_dtype=torch.float32,
)
infer_config = AutoConfig.from_pretrained(
    "THUDM/chatglm2-6b",
@@ -68,6 +52,21 @@ infer_config = AutoConfig.from_pretrained(
def init_chatglm():
config = AutoConfig.from_pretrained(
"THUDM/chatglm2-6b",
trust_remote_code=True,
num_layers=2,
padded_vocab_size=65024,
hidden_size=64,
ffn_hidden_size=214,
num_attention_heads=8,
kv_channels=16,
rmsnorm=True,
original_rope=True,
use_cache=True,
multi_query_attention=False,
torch_dtype=torch.float32,
)
model = AutoModelForCausalLM.from_config(config, empty_init=False, trust_remote_code=True)
for m in model.modules():
    if m.__class__.__name__ == "RMSNorm":

Loading…
Cancel
Save