from typing import List

from torch import Tensor


# Layer norm forward with an elementwise affine transform (gamma, beta).
# Judging by the statistics that backward_affine consumes, this presumably
# returns [output, mean, invvar].
def forward_affine(input: Tensor, normalized_shape: List[int], gamma: Tensor, beta: Tensor, epsilon: float) -> List[Tensor]:
    ...
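

# A pure-PyTorch reference sketch (an assumption, not part of this stub): it
# illustrates the math forward_affine is expected to implement and the
# [output, mean, invvar] return order inferred from backward_affine's inputs.
# The helper name `_reference_forward_affine` is hypothetical.
def _reference_forward_affine(input, normalized_shape, gamma, beta, epsilon):
    # Normalize over the trailing `normalized_shape` dimensions.
    dims = tuple(range(input.dim() - len(normalized_shape), input.dim()))
    mean = input.mean(dim=dims, keepdim=True)
    # Biased variance, as in torch.nn.LayerNorm.
    var = input.var(dim=dims, unbiased=False, keepdim=True)
    invvar = (var + epsilon).rsqrt()
    output = (input - mean) * invvar * gamma + beta
    return [output, mean, invvar]

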
# Layer norm backward with an elementwise affine transform; consumes the
# saved input and the per-row statistics (mean, invvar) from the forward
# pass, presumably returning [grad_input, grad_gamma, grad_beta].
def backward_affine(dout: Tensor, mean: Tensor, invvar: Tensor, input: Tensor,
                    normalized_shape: List[int], gamma: Tensor, beta: Tensor, epsilon: float) -> List[Tensor]:
    ...
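

# A matching pure-PyTorch sketch of the backward pass (again an assumption,
# not the extension's actual implementation): standard layer norm gradients,
# with the [grad_input, grad_gamma, grad_beta] return order inferred from the
# affine parameters. `mean` and `invvar` are taken with `keepdim` shapes as
# produced by the forward sketch above, and `input` is assumed to have at
# least one leading batch dimension; `beta` and `epsilon` are kept only to
# mirror the stub's signature.
def _reference_backward_affine(dout, mean, invvar, input, normalized_shape,
                               gamma, beta, epsilon):
    dims = tuple(range(input.dim() - len(normalized_shape), input.dim()))
    batch_dims = tuple(range(input.dim() - len(normalized_shape)))
    xhat = (input - mean) * invvar  # normalized activations
    g = dout * gamma                # gradient w.r.t. xhat
    grad_input = invvar * (
        g
        - g.mean(dim=dims, keepdim=True)
        - xhat * (g * xhat).mean(dim=dims, keepdim=True)
    )
    grad_gamma = (dout * xhat).sum(dim=batch_dims)
    grad_beta = dout.sum(dim=batch_dims)
    return [grad_input, grad_gamma, grad_beta]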