"""Fused bias + dropout + residual-add helpers, JIT-scripted separately for
training and inference so the `training` flag is a compile-time constant."""

import torch


def bias_dropout_add(x, bias, residual, prob, training):
    # type: (Tensor, Tensor, Tensor, float, bool) -> Tensor
    # Add the bias to the input, apply dropout, then add the residual.
    out = torch.nn.functional.dropout(x + bias, p=prob, training=training)
    out = residual + out
    return out


@torch.jit.script
def bias_dropout_add_fused_train(x: torch.Tensor,
                                 bias: torch.Tensor,
                                 residual: torch.Tensor,
                                 prob: float) -> torch.Tensor:
    # JIT-scripted variant with training=True, so dropout is active.
    return bias_dropout_add(x, bias, residual, prob, True)


@torch.jit.script
def bias_dropout_add_fused_inference(x: torch.Tensor,
                                     bias: torch.Tensor,
                                     residual: torch.Tensor,
                                     prob: float) -> torch.Tensor:
    # JIT-scripted variant with training=False, so dropout is a no-op.
    return bias_dropout_add(x, bias, residual, prob, False)
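

# Minimal usage sketch (not part of the original file): the shapes and the
# dropout probability below are illustrative assumptions, not values from
# the source. It exercises both fused variants and checks that the
# inference path reduces to a plain x + bias + residual.
if __name__ == "__main__":
    x = torch.randn(4, 16)          # e.g. an attention or MLP output
    bias = torch.randn(16)          # broadcast over the batch dimension
    residual = torch.randn(4, 16)   # the skip-connection input
    prob = 0.1

    # During training, dropout is applied after the bias add.
    out_train = bias_dropout_add_fused_train(x, bias, residual, prob)

    # At inference, dropout is the identity, so the result is exact.
    out_eval = bias_dropout_add_fused_inference(x, bias, residual, prob)
    assert torch.allclose(out_eval, x + bias + residual)
    print(out_train.shape, out_eval.shape)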