[shardformer] rename policy file name

pull/4445/head
ver217 2023-07-05 15:13:00 +08:00 committed by Hongxin Liu
parent 5fc60a3a04
commit 1ed3f8a24f
11 changed files with 12 additions and 12 deletions

@@ -3,7 +3,7 @@ from dataclasses import dataclass
 import torch.nn as nn
-from .basepolicy import Policy
+from .base_policy import Policy
 __all__ = ["PolicyLocation", "get_autopolicy", "import_policy"]

@@ -3,7 +3,7 @@ import torch.nn as nn
 import colossalai.shardformer.layer as col_nn
 from .._utils import getattr_, setattr_
-from .basepolicy import ModulePolicyDescription, Policy, SubModuleReplacementDescription
+from .base_policy import ModulePolicyDescription, Policy, SubModuleReplacementDescription
 __all__ = [
     'BertPolicy', 'BertModelPolicy', 'BertForPretrainingPolicy', 'BertLMHeadModelPolicy', 'BertForMaskedLMPolicy',

@@ -4,7 +4,7 @@ import colossalai.shardformer.layer as col_nn
 from .._utils import getattr_, setattr_
 from ..modeling.bloom import build_bloom_alibi_tensor_fn
-from .basepolicy import ModulePolicyDescription, Policy, SubModuleReplacementDescription
+from .base_policy import ModulePolicyDescription, Policy, SubModuleReplacementDescription
 class BloomPolicy(Policy):

@@ -3,7 +3,7 @@ import torch.nn as nn
 import colossalai.shardformer.layer as col_nn
 from .._utils import getattr_, setattr_
-from .basepolicy import ModulePolicyDescription, Policy, SubModuleReplacementDescription
+from .base_policy import ModulePolicyDescription, Policy, SubModuleReplacementDescription
 __all__ = [
     'GPT2Policy', 'GPT2ModelPolicy', 'GPT2LMHeadModelPolicy', 'GPT2DoubleHeadsModelPolicy',

@@ -4,7 +4,7 @@ import torch.nn as nn
 from colossalai.shardformer.layer import FusedRMSNorm, Linear1D_Col, Linear1D_Row, VocabParallelEmbedding1D
-from .basepolicy import ModulePolicyDescription, Policy, SubModuleReplacementDescription
+from .base_policy import ModulePolicyDescription, Policy, SubModuleReplacementDescription
 __all__ = ['LlamaPolicy', 'LlamaForCausalLMPolicy', 'LlamaForSequenceClassificationPolicy']

@@ -1,7 +1,7 @@
 from colossalai.shardformer.layer import FusedLayerNorm, Linear1D_Col, Linear1D_Row, VocabParallelEmbedding1D
 from .._utils import getattr_, setattr_
-from .basepolicy import ModulePolicyDescription, Policy, SubModuleReplacementDescription
+from .base_policy import ModulePolicyDescription, Policy, SubModuleReplacementDescription
 __all__ = [
     'OPTPolicy', 'OPTModelPolicy', 'OPTForCausalLMPolicy', 'OPTForSequenceClassificationPolicy',

@@ -6,10 +6,10 @@ from colossalai.shardformer.layer import (
     Linear1D_Row,
     VocabParallelEmbedding1D,
 )
-from colossalai.shardformer.policies.basepolicy import ModulePolicyDescription
+from colossalai.shardformer.policies.base_policy import ModulePolicyDescription
 from .._utils import getattr_, setattr_
-from .basepolicy import ModulePolicyDescription, Policy, SubModuleReplacementDescription
+from .base_policy import ModulePolicyDescription, Policy, SubModuleReplacementDescription
 __all__ = ["T5ModelPolicy", "T5ForConditionalGenerationPolicy", "T5EncoderPolicy"]

@@ -4,7 +4,7 @@ import torch.nn as nn
 from colossalai.shardformer.layer import DropoutForReplicatedInput, FusedLayerNorm, Linear1D_Col, Linear1D_Row
-from .basepolicy import ModulePolicyDescription, Policy, SubModuleReplacementDescription
+from .base_policy import ModulePolicyDescription, Policy, SubModuleReplacementDescription
 __all__ = ['ViTPolicy']

@@ -6,8 +6,8 @@ from torch import Tensor
 from colossalai.lazy import LazyTensor
 from .._utils import getattr_, setattr_
-from ..policies.autopolicy import get_autopolicy
-from ..policies.basepolicy import Policy, SubModuleReplacementDescription
+from ..policies.auto_policy import get_autopolicy
+from ..policies.base_policy import Policy, SubModuleReplacementDescription
 from .shard_config import ShardConfig
 from .utils import set_tensors_to_none

@@ -5,7 +5,7 @@ from torch import Tensor
 from colossalai.cluster import DistCoordinator
-from ..policies.basepolicy import Policy
+from ..policies.base_policy import Policy
 from .shard_config import ShardConfig
 from .sharder import ModelSharder
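
Net effect for downstream code: the policy modules now use snake_case file names, so any external imports of these modules must be updated. A minimal sketch, using only import paths that appear in the diff above (the old paths are shown as comments for contrast):

# Old module names, removed by this commit:
#   from colossalai.shardformer.policies.autopolicy import get_autopolicy
#   from colossalai.shardformer.policies.basepolicy import Policy

# New module names after the rename:
from colossalai.shardformer.policies.auto_policy import get_autopolicy
from colossalai.shardformer.policies.base_policy import (
    ModulePolicyDescription,
    Policy,
    SubModuleReplacementDescription,
)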