From 717f0c9e64f6002a61b48a7f091cca077ae1325a Mon Sep 17 00:00:00 2001
From: Wenwen Qu
Date: Wed, 13 Sep 2023 13:00:49 +0800
Subject: [PATCH] change float16 to bfloat16

---
 configs/7B_sft.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/configs/7B_sft.py b/configs/7B_sft.py
index e0b9a8a..7f44533 100644
--- a/configs/7B_sft.py
+++ b/configs/7B_sft.py
@@ -128,7 +128,7 @@ model = dict(
     num_layers=NUM_LAYER,
     mlp_ratio=MLP_RATIO,
     apply_post_layer_norm=False,
-    dtype="torch.float16", # Support: "torch.float16", "torch.half", "torch.bfloat16", "torch.float32", "torch.tf32"
+    dtype="torch.bfloat16", # Support: "torch.float16", "torch.half", "torch.bfloat16", "torch.float32", "torch.tf32"
     norm_type="rmsnorm",
     layer_norm_epsilon=1e-5,
     use_flash_attn=True,