import torch
import torch.nn.functional as F
from typing import Optional

from colossalai.tensor.op_wrapper import colo_op_impl
from colossalai.tensor import ColoTensor
from colossalai.nn.loss.loss_1d import VocabParallelCrossEntropyLoss1D
from ._utils import GeneralTensor, convert_to_colo_tensor


@colo_op_impl(F.cross_entropy)
def colo_cross_entropy(input_tensor: GeneralTensor,
                       target: GeneralTensor,
                       weight: Optional[GeneralTensor] = None,
                       size_average: Optional[bool] = None,
                       ignore_index: int = -100,
                       reduce: Optional[bool] = None,
                       reduction: str = "mean",
                       label_smoothing: float = 0.0):
    """ColoTensor-aware replacement for ``F.cross_entropy``.

    The input, target and weight are first converted to ColoTensors; the loss
    is then computed either with the plain PyTorch kernel (gathered input) or
    with the 1D vocab-parallel kernel (column-sharded input).
    """
    input_tensor, target, weight = tuple(map(convert_to_colo_tensor, (input_tensor, target, weight)))

    if input_tensor.spec.is_gathered():    # input is gathered (replicated): fall back to torch
        output = F.cross_entropy(input_tensor,
                                 target,
                                 weight=weight,
                                 size_average=size_average,
                                 ignore_index=ignore_index,
                                 reduce=reduce,
                                 reduction=reduction,
                                 label_smoothing=label_smoothing)
        return ColoTensor.from_torch_tensor(output)
    elif input_tensor.has_spec():    # single model parallelism applied
        if input_tensor.spec.is_1D_col():
            # Logits are column-sharded along the vocab dimension: use the
            # vocab-parallel cross-entropy so the logits never need gathering.
            output = VocabParallelCrossEntropyLoss1D()(input_tensor, target)
            return ColoTensor.from_torch_tensor(output)
        else:
            raise NotImplementedError
    else:
        raise NotImplementedError
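

# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original file): once this module
# is imported, calls to F.cross_entropy whose arguments are ColoTensors are
# dispatched to colo_cross_entropy through the colo_op_impl registry. The
# sketch assumes a ColoTensor built with ColoTensor.from_torch_tensor is
# treated as gathered (replicated), so the plain F.cross_entropy branch runs.
#
#   logits = ColoTensor.from_torch_tensor(torch.randn(4, 10))
#   labels = ColoTensor.from_torch_tensor(torch.randint(0, 10, (4,)))
#   loss = F.cross_entropy(logits, labels)    # returns a ColoTensor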