From b414eaa5dbbc58f001394b9e9e9940d06c36bebf Mon Sep 17 00:00:00 2001
From: Boyuan Yao <70263930+Cypher30@users.noreply.github.com>
Date: Tue, 12 Jul 2022 23:15:05 +0800
Subject: [PATCH] [NFC] polish colossalai/nn/optimizer/lamb.py code style
 (#1275)

---
 colossalai/nn/optimizer/lamb.py | 15 +++++----------
 1 file changed, 5 insertions(+), 10 deletions(-)

diff --git a/colossalai/nn/optimizer/lamb.py b/colossalai/nn/optimizer/lamb.py
index aa137098a..bcca990f0 100644
--- a/colossalai/nn/optimizer/lamb.py
+++ b/colossalai/nn/optimizer/lamb.py
@@ -29,20 +29,16 @@ class Lamb(Optimizer):
         https://arxiv.org/abs/1904.00962
     """
 
-    def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-6,
-                 weight_decay=0, adam=False):
+    def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-6, weight_decay=0, adam=False):
         if not 0.0 <= lr:
             raise ValueError("Invalid learning rate: {}".format(lr))
         if not 0.0 <= eps:
             raise ValueError("Invalid epsilon value: {}".format(eps))
         if not 0.0 <= betas[0] < 1.0:
-            raise ValueError(
-                "Invalid beta parameter at index 0: {}".format(betas[0]))
+            raise ValueError("Invalid beta parameter at index 0: {}".format(betas[0]))
         if not 0.0 <= betas[1] < 1.0:
-            raise ValueError(
-                "Invalid beta parameter at index 1: {}".format(betas[1]))
-        defaults = dict(lr=lr, betas=betas, eps=eps,
-                        weight_decay=weight_decay)
+            raise ValueError("Invalid beta parameter at index 1: {}".format(betas[1]))
+        defaults = dict(lr=lr, betas=betas, eps=eps, weight_decay=weight_decay)
         self.adam = adam
         super(Lamb, self).__init__(params, defaults)
 
@@ -63,8 +59,7 @@ class Lamb(Optimizer):
                 continue
             grad = p.grad.data
             if grad.is_sparse:
-                raise RuntimeError(
-                    'Lamb does not support sparse gradients, consider SparseAdam instad.')
+                raise RuntimeError('Lamb does not support sparse gradients, consider SparseAdam instead.')
             state = self.state[p]
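
Reviewer note, not part of the patch: for context, here is a minimal usage sketch of the constructor whose signature the patch reflows. It assumes Lamb follows the standard torch.optim.Optimizer step API (it subclasses Optimizer per the hunk headers) and that the import path matches the file touched by the diff; the toy model, tensors, and loss are hypothetical placeholders.

    # Minimal sketch, assuming the standard torch.optim training-loop API.
    import torch
    import torch.nn as nn
    from colossalai.nn.optimizer.lamb import Lamb  # path taken from the diff above

    model = nn.Linear(16, 4)  # hypothetical toy model
    optimizer = Lamb(
        model.parameters(),
        lr=1e-3,              # defaults as shown in __init__ above
        betas=(0.9, 0.999),
        eps=1e-6,
        weight_decay=0,
        adam=False,           # assumption: True presumably falls back to an Adam-style update
    )

    inputs = torch.randn(8, 16)   # hypothetical batch
    targets = torch.randn(8, 4)

    optimizer.zero_grad()
    loss = nn.functional.mse_loss(model(inputs), targets)
    loss.backward()               # dense gradients only: sparse grads hit the RuntimeError above
    optimizer.step()

All arguments pass through the validation guards in __init__, so e.g. a negative lr or a beta outside [0, 1) raises ValueError before any state is created.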