[NFC] polish colossalai/kernel/cuda_native/layer_norm.py code style (#980)

pull/997/head
puck_WCR 2022-05-16 14:43:32 +08:00 committed by binmakeswell
parent c50c08dcbb
commit bda70b4b66
1 changed file with 3 additions and 4 deletions


@@ -24,8 +24,8 @@ class FusedLayerNormAffineFunction(torch.autograd.Function):
         input_ = input.contiguous()
         weight_ = weight.contiguous()
         bias_ = bias.contiguous()
-        output, mean, invvar = colossal_layer_norm_cuda.forward_affine(
-            input_, ctx.normalized_shape, weight_, bias_, ctx.eps)
+        output, mean, invvar = colossal_layer_norm_cuda.forward_affine(input_, ctx.normalized_shape, weight_, bias_,
+                                                                       ctx.eps)
         ctx.save_for_backward(input_, weight_, bias_, mean, invvar)
         return output
@@ -72,8 +72,7 @@ class MixedFusedLayerNorm(torch.nn.Module):
     def forward(self, input):
-        return FusedLayerNormAffineFunction.apply(input, self.weight, self.bias,
-                                                  self.normalized_shape, self.eps)
+        return FusedLayerNormAffineFunction.apply(input, self.weight, self.bias, self.normalized_shape, self.eps)

     def __repr__(self):
         return f'MixedFusedLayerNorm(normalized_shape={self.normalized_shape}, eps={self.eps})'
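For context, a minimal usage sketch of the module touched by this diff. It assumes the colossal_layer_norm_cuda extension is built and a CUDA device is available, and that MixedFusedLayerNorm accepts a normalized_shape argument with an optional eps, as its __repr__ suggests; the tensor shapes below are purely illustrative.

import torch
from colossalai.kernel.cuda_native.layer_norm import MixedFusedLayerNorm

hidden_size = 1024                       # illustrative hidden dimension
layer_norm = MixedFusedLayerNorm(hidden_size).cuda()

# forward() dispatches to FusedLayerNormAffineFunction.apply, which calls the
# fused CUDA kernel and saves mean/invvar for the backward pass (see the diff).
x = torch.randn(8, 128, hidden_size, device='cuda', requires_grad=True)
y = layer_norm(x)
y.sum().backward()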