ColossalAI/tests/test_layers/test_1d/checks_1d/common.py

#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import torch
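
# Shared shape constants used by the 1D tensor-parallel layer checks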
DEPTH = 4
BATCH_SIZE = 8
SEQ_LENGTH = 8
IMG_SIZE = 16
HIDDEN_SIZE = 8
NUM_CLASSES = 8
VOCAB_SIZE = 16


def check_equal(A, B):
    """Assert that two tensors match element-wise within a loose tolerance."""
    assert torch.allclose(A, B, rtol=1e-3, atol=1e-1)
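
Below is a minimal, hypothetical usage sketch of check_equal; the import path and tensors are assumed for illustration only (the actual 1D-parallel test scripts import this module from the same package):

import torch

from checks_1d.common import BATCH_SIZE, SEQ_LENGTH, HIDDEN_SIZE, check_equal

out = torch.randn(BATCH_SIZE, SEQ_LENGTH, HIDDEN_SIZE)
ref = out + 1e-4 * torch.randn_like(out)  # tiny perturbation, well inside atol=1e-1
check_equal(out, ref)                     # passes: tensors are element-wise close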