mirror of https://github.com/hpcaitech/ColossalAI
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

import torch

# Constants shared across this module
DEPTH = 2
BATCH_SIZE = 512
SEQ_LENGTH = 128
HIDDEN_SIZE = 512
NUM_CLASSES = 1000
NUM_BLOCKS = 6
IMG_SIZE = 224


def check_equal(A, B):
    # True when A and B agree elementwise within the given relative/absolute tolerances.
    return torch.allclose(A, B, rtol=1e-4, atol=1e-2)
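# --- Illustrative usage sketch (an assumption, not part of the original file) ---
# A minimal check of check_equal's behaviour: two tensors that differ only by
# noise far below atol/rtol should compare as equal. The tensor shape below is
# arbitrary and simply reuses the constants defined above.
if __name__ == "__main__":
    a = torch.randn(BATCH_SIZE, HIDDEN_SIZE)
    b = a + 1e-6 * torch.randn_like(a)
    assert check_equal(a, b)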