mirror of https://github.com/hpcaitech/ColossalAI
You cannot select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
22 lines
735 B
22 lines
735 B
from dataclasses import dataclass, field
|
|
from typing import Optional
|
|
|
|
from colossalai.legacy.tensor.distspec import DistPlacementPattern, _DistSpec
|
|
from colossalai.legacy.tensor.process_group import ProcessGroup
|
|
|
|
from .compute_spec import ComputeSpec
|
|
|
|
|
|
@dataclass
class ColoTensorSpec:
    """ColoTensorSpec

    A data class holding the specifications of a `ColoTensor`.

    It contains attributes of `ProcessGroup`, `_DistSpec`, and `ComputeSpec`.
    The latter two attributes are optional; if not set, they default to a
    replicated `_DistSpec` (i.e. `_DistSpec(DistPlacementPattern.REPLICATE)`)
    and `None`, respectively.
    """

    # Process group the tensor belongs to (required; no default).
    pg: ProcessGroup
    # Distribution spec; default_factory is used because _DistSpec is mutable
    # (a shared default instance must not leak across ColoTensorSpec objects).
    dist_attr: Optional[_DistSpec] = field(default_factory=lambda: _DistSpec(DistPlacementPattern.REPLICATE))
    # Compute pattern spec; None means no compute pattern is specified.
    compute_attr: Optional[ComputeSpec] = None