mirror of https://github.com/hpcaitech/ColossalAI
You cannot select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
30 lines
779 B
30 lines
779 B
from enum import Enum
|
|
|
|
|
|
class ComputePattern(Enum):
    """Closed set of parallel compute-pattern identifiers.

    Member names follow ``TP<dims>`` (presumably "tensor parallel" over a
    1-D/2-D/2.5-D/3-D device mesh — TODO confirm against usage elsewhere
    in the project).
    """

    TP1D = 0
    TP2D = 1
    TP2P5D = 2
    TP3D = 3
|
|
|
|
|
|
class ComputeSpec(object):
    """ComputeSpec

    The specification for a computation pattern.

    Args:
        compute_pattern (ComputePattern): an Enum instance for the compute pattern.

    Raises:
        TypeError: if ``compute_pattern`` is not a ``ComputePattern`` member.
    """

    def __init__(self, compute_pattern: ComputePattern) -> None:
        # Validate with an explicit exception instead of `assert`:
        # asserts are stripped when Python runs with -O, which would
        # silently disable this type check.
        if not isinstance(compute_pattern, ComputePattern):
            raise TypeError(
                f'compute_pattern must be a ComputePattern, got {type(compute_pattern).__name__}')
        self.compute_pattern = compute_pattern
        # Make sure output tensors are replicate by default; callers can
        # opt out via set_output_replicate().
        self.output_replicate = True

    def __repr__(self):
        return f'ComputeSpec(pattern={self.compute_pattern}, replicate_output={self.output_replicate})'

    def set_output_replicate(self, flag: bool = True):
        """Set whether output tensors should be replicated.

        Args:
            flag (bool): True to replicate outputs (the default), False otherwise.
        """
        self.output_replicate = flag
|