[NFC] polish colossalai/nn/optimizer/lamb.py code style (#1275)

pull/1298/head
Boyuan Yao 2 years ago committed by Frank Lee
parent 5f6ab35d25
commit b414eaa5db

@@ -29,20 +29,16 @@ class Lamb(Optimizer):
     https://arxiv.org/abs/1904.00962
     """
 
-    def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-6,
-                 weight_decay=0, adam=False):
+    def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-6, weight_decay=0, adam=False):
         if not 0.0 <= lr:
             raise ValueError("Invalid learning rate: {}".format(lr))
         if not 0.0 <= eps:
             raise ValueError("Invalid epsilon value: {}".format(eps))
         if not 0.0 <= betas[0] < 1.0:
-            raise ValueError(
-                "Invalid beta parameter at index 0: {}".format(betas[0]))
+            raise ValueError("Invalid beta parameter at index 0: {}".format(betas[0]))
         if not 0.0 <= betas[1] < 1.0:
-            raise ValueError(
-                "Invalid beta parameter at index 1: {}".format(betas[1]))
-        defaults = dict(lr=lr, betas=betas, eps=eps,
-                        weight_decay=weight_decay)
+            raise ValueError("Invalid beta parameter at index 1: {}".format(betas[1]))
+        defaults = dict(lr=lr, betas=betas, eps=eps, weight_decay=weight_decay)
         self.adam = adam
         super(Lamb, self).__init__(params, defaults)
@@ -63,8 +59,7 @@ class Lamb(Optimizer):
                     continue
                 grad = p.grad.data
                 if grad.is_sparse:
-                    raise RuntimeError(
-                        'Lamb does not support sparse gradients, consider SparseAdam instad.')
+                    raise RuntimeError('Lamb does not support sparse gradients, consider SparseAdam instad.')
                 state = self.state[p]
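
For reference, a minimal usage sketch of the polished constructor; it assumes torch and colossalai are installed and that Lamb is importable from colossalai.nn.optimizer.lamb, matching the file path this commit touches:

import torch
from colossalai.nn.optimizer.lamb import Lamb

model = torch.nn.Linear(4, 2)

# All hyperparameters fit the one-line signature polished above; out-of-range
# values (e.g. lr=-1.0) still raise ValueError as in the checks shown in the diff.
optimizer = Lamb(model.parameters(), lr=1e-3, betas=(0.9, 0.999), eps=1e-6, weight_decay=0.01, adam=False)

# One training step with dense gradients; sparse gradients trigger the
# RuntimeError shown in the second hunk.
loss = model(torch.randn(8, 4)).sum()
loss.backward()
optimizer.step()
optimizer.zero_grad()

Since this is an [NFC] commit, the example behaves identically before and after the patch; only the source formatting changed.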
