mirror of https://github.com/hpcaitech/ColossalAI
You cannot select more than 25 topics.
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
14 lines · 296 B
import operator

import torch
import torch.nn as nn

# Registry constants describing how certain modules/ops interact with
# activation memory. Consumers elsewhere in the project check membership
# in these lists (e.g. via `in`) when estimating or planning memory use.

# Module classes whose forward is treated as in-place (they can reuse
# their input buffer instead of allocating a new output).
INPLACE_MODULE = [nn.ReLU]

# Torch functions treated as in-place operations for the same purpose.
# NOTE(review): torch.flatten returns a view when possible rather than
# mutating its input — "inplace" here presumably means "no new storage
# allocated"; confirm against the callers of this list.
INPLACE_OPS = [torch.flatten]

# Operations assumed not to save their forward activations for backward
# (element-wise add/sub need only gradient pass-through, not the inputs).
# Both the torch functions and the Python operator-module equivalents are
# listed so traced graphs using either spelling are matched.
NO_SAVE_ACTIVATION = [torch.add, torch.sub, operator.add, operator.sub]