mirror of https://github.com/hpcaitech/ColossalAI
You cannot select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
13 lines
396 B
13 lines
396 B
from dataclasses import dataclass, field
from typing import Optional

from colossalai.tensor import ProcessGroup
from colossalai.tensor.distspec import DistPlacementPattern, _DistSpec

from .compute_spec import ComputeSpec
@dataclass
class ColoTensorSpec:
    """Specification of how a ColoTensor is placed and computed.

    Attributes:
        pg: the process group the tensor belongs to.
        dist_attr: distribution spec; defaults to a fresh replicated
            ``_DistSpec`` per instance.
        compute_attr: optional compute pattern spec; ``None`` means unspecified.
    """

    pg: ProcessGroup
    # Use default_factory so each ColoTensorSpec gets its OWN _DistSpec.
    # A plain class-level default (`= _DistSpec(...)`) is evaluated once and
    # shared by every instance, so mutating one spec's dist_attr would leak
    # into all other specs created without an explicit dist_attr.
    dist_attr: Optional[_DistSpec] = field(default_factory=lambda: _DistSpec(DistPlacementPattern.REPLICATE))
    compute_attr: Optional[ComputeSpec] = None