ColossalAI/colossalai/nn/metric/accuracy_2d.py


import torch
from torch import nn

from colossalai.nn.layer.parallel_2d import reduce_by_batch_2d, split_batch_2d

from ._utils import calc_acc


class Accuracy2D(nn.Module):
    """Accuracy metric for models trained with 2D tensor parallelism."""

    def __init__(self):
        super().__init__()

    def forward(self, logits, targets):
        with torch.no_grad():
            # Shard the full-batch targets so they line up with the
            # locally held logits of the 2D-parallel model.
            targets = split_batch_2d(targets)
            # Count correct predictions on the local shard.
            correct = calc_acc(logits, targets)
            # Sum the per-shard counts across the parallel group
            # along which the batch was split.
            correct = reduce_by_batch_2d.apply(correct)
        return correct
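
For context, the sketch below shows one plausible way this metric could be driven during evaluation. It is an illustration under stated assumptions, not part of the file: model, inputs, and labels are hypothetical placeholders, and the code presumes an already-initialized ColossalAI 2D tensor-parallel run, where the module returns the number of correctly predicted samples for the batch.

# Hypothetical usage sketch (assumes colossalai has been launched with a
# 2D tensor-parallel configuration; `model`, `inputs`, and `labels` are
# placeholders supplied by the surrounding evaluation script).
metric = Accuracy2D()

with torch.no_grad():
    logits = model(inputs)                # output of the 2D-parallel model
    num_correct = metric(logits, labels)  # correct predictions in the batch
    # Dividing by the global batch size is the assumed final step.
    accuracy = num_correct.item() / labels.size(0)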