fix format (#362)

pull/394/head
Kai Wang (Victor Kai) 2022-03-10 11:33:21 +08:00 committed by Frank Lee
parent a77d73f22b
commit 53bb3bcc0a
2 changed files with 1 addition and 7 deletions


@@ -14,7 +14,7 @@ from colossalai.context.parallel_mode import ParallelMode
 from colossalai.core import global_context as gpc
 from colossalai.logging import get_dist_logger
 from colossalai.utils import (print_rank_0, copy_tensor_parallel_attributes,
-                              clip_grad_norm_fp32, count_zeros_fp32, multi_tensor_applier, is_using_pp)
+                              clip_grad_norm_fp32, count_zeros_fp32, multi_tensor_applier)
 def _zero_grad_group_helper(group, set_to_none):


@@ -1,10 +1,4 @@
-# modified from https://github.com/microsoft/DeepSpeed/blob/master/deepspeed/ops/adam/cpu_adam.py
-import math
 import torch
-import time
-from pathlib import Path
-import colossalai
 class CPUAdam(torch.optim.Optimizer):
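
The second hunk only strips imports that the module does not use, leaving torch as the sole dependency before the class definition. A minimal sketch of the module head after this cleanup, assuming only the lines visible in this hunk (the class body is unchanged by the commit and is elided in this view):

import torch


class CPUAdam(torch.optim.Optimizer):
    # Body not shown in this diff; the commit touches only the imports above.
    ...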