From c59d7aca095d205df404c0c6e831e7ae33b785f1 Mon Sep 17 00:00:00 2001
From: Kun Lin <81014421+klhhhhh@users.noreply.github.com>
Date: Fri, 7 Jul 2023 14:06:46 +0800
Subject: [PATCH] Feature/vit support (#4182)

* [shardformer] added tests

* [shardformer] vit test finish and support

* fix attention dropout
---
 colossalai/shardformer/policies/vit.py | 83 +++++++++++++-------------
 1 file changed, 42 insertions(+), 41 deletions(-)

diff --git a/colossalai/shardformer/policies/vit.py b/colossalai/shardformer/policies/vit.py
index 47f2c58fc..96f27de2a 100644
--- a/colossalai/shardformer/policies/vit.py
+++ b/colossalai/shardformer/policies/vit.py
@@ -1,4 +1,3 @@
-from functools import partial
 from typing import Callable, Dict, List, Union
 
 import torch.nn as nn
@@ -36,7 +35,7 @@ class ViTPolicy(Policy):
                     suffix="dropout",
                     target_module=col_nn.DropoutForReplicatedInput,
                 )
-                ])
+            ])
 
         policy[ViTLayer] = ModulePolicyDescription(attribute_replacement={
             "attention.attention.num_attention_heads":
@@ -43,45 +42,47 @@ class ViTPolicy(Policy):
                 self.model.config.num_attention_heads // self.shard_config.tensor_parallel_size,
             "attention.attention.all_head_size":
                 self.model.config.hidden_size // self.shard_config.tensor_parallel_size,
         },
-                                                   param_replacement=[],
-                                                   sub_module_replacement=[
-                                                       SubModuleReplacementDescription(
-                                                           suffix="attention.attention.query",
-                                                           target_module=col_nn.Linear1D_Col,
-                                                       ),
-                                                       SubModuleReplacementDescription(
-                                                           suffix="attention.attention.key",
-                                                           target_module=col_nn.Linear1D_Col,
-                                                       ),
-                                                       SubModuleReplacementDescription(
-                                                           suffix="attention.attention.value",
-                                                           target_module=col_nn.Linear1D_Col,
-                                                       ),
-                                                       SubModuleReplacementDescription(
-                                                           suffix="attention.attention.dropout",
-                                                           target_module=col_nn.DropoutForParallelInput,
-                                                       ),
-                                                       SubModuleReplacementDescription(
-                                                           suffix="attention.output.dense",
-                                                           target_module=col_nn.Linear1D_Row,
-                                                       ),
-                                                       SubModuleReplacementDescription(
-                                                           suffix="attention.output.dropout",
-                                                           target_module=col_nn.DropoutForReplicatedInput,
-                                                       ),
-                                                       SubModuleReplacementDescription(
-                                                           suffix="intermediate.dense",
-                                                           target_module=col_nn.Linear1D_Col,
-                                                       ),
-                                                       SubModuleReplacementDescription(
-                                                           suffix="output.dense",
-                                                           target_module=col_nn.Linear1D_Row,
-                                                       ),
-                                                       SubModuleReplacementDescription(
-                                                           suffix="output.dropout",
-                                                           target_module=col_nn.DropoutForReplicatedInput,
-                                                       ),
-                                                   ])
+            param_replacement=[],
+            sub_module_replacement=[
+                SubModuleReplacementDescription(
+                    suffix="attention.attention.query",
+                    target_module=col_nn.Linear1D_Col,
+                ),
+                SubModuleReplacementDescription(
+                    suffix="attention.attention.key",
+                    target_module=col_nn.Linear1D_Col,
+                ),
+                SubModuleReplacementDescription(
+                    suffix="attention.attention.value",
+                    target_module=col_nn.Linear1D_Col,
+                ),
+                SubModuleReplacementDescription(
+                    suffix="attention.attention.dropout",
+                    target_module=col_nn.DropoutForParallelInput,
+                ),
+                SubModuleReplacementDescription(
+                    suffix="attention.output.dense",
+                    target_module=col_nn.Linear1D_Row,
+                ),
+                SubModuleReplacementDescription(
+                    suffix="attention.output.dropout",
+                    target_module=col_nn.DropoutForReplicatedInput,
+                ),
+                SubModuleReplacementDescription(
+                    suffix="intermediate.dense",
+                    target_module=col_nn.Linear1D_Col,
+                ),
+                SubModuleReplacementDescription(
+                    suffix="output.dense",
+                    target_module=col_nn.Linear1D_Row,
+                ),
+                SubModuleReplacementDescription(
+                    suffix="output.dropout",
+                    target_module=col_nn.DropoutForReplicatedInput,
+                ),
+            ])
+
+        return policy
 
         return policy
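
Reviewer note on the sharding scheme (not part of the patch): the policy above wires up Megatron-style tensor parallelism for ViT. The query/key/value and intermediate.dense projections are replaced with column-parallel linears (Linear1D_Col), the attention.output.dense and output.dense projections with row-parallel linears (Linear1D_Row), and the attribute_replacement entries shrink num_attention_heads and all_head_size by the tensor-parallel degree to match the per-rank shard. The "fix attention dropout" change maps attention.attention.dropout to DropoutForParallelInput because that dropout sees rank-local (sharded) activations, while the dropouts after the row-parallel projections see replicated activations and so use DropoutForReplicatedInput. Below is a minimal single-process sketch of the weight-splitting idea behind the column/row replacements; tp_size and rank are hypothetical stand-ins, and the all-gather/all-reduce communication the real ColossalAI layers perform is omitted.

# Toy illustration, not ColossalAI code: how column/row sharding of two
# nn.Linear layers composes. `tp_size` and `rank` are hypothetical stand-ins
# for the tensor-parallel world size and this process's rank.
import torch
import torch.nn as nn

hidden_size, tp_size, rank = 768, 2, 0

full_query = nn.Linear(hidden_size, hidden_size)  # e.g. attention.attention.query
full_out = nn.Linear(hidden_size, hidden_size)    # e.g. attention.output.dense

# Column parallelism: shard the output features. Each rank then holds
# num_attention_heads // tp_size heads, which is why the policy rewrites
# all_head_size to hidden_size // tensor_parallel_size.
col_shard = nn.Linear(hidden_size, hidden_size // tp_size)
with torch.no_grad():
    col_shard.weight.copy_(full_query.weight.chunk(tp_size, dim=0)[rank])
    col_shard.bias.copy_(full_query.bias.chunk(tp_size, dim=0)[rank])

# Row parallelism: shard the input features. Each rank consumes exactly the
# slice its own column-parallel layer produced; the partial outputs of all
# ranks are summed (all-reduce in the real layers) to form the full result.
row_shard = nn.Linear(hidden_size // tp_size, hidden_size, bias=False)
with torch.no_grad():
    row_shard.weight.copy_(full_out.weight.chunk(tp_size, dim=1)[rank])

x = torch.randn(1, hidden_size)
partial = row_shard(col_shard(x))  # one rank's contribution to the output

Summed over all ranks, row_shard(col_shard(x)) reproduces full_out(full_query(x)) up to the omitted output bias, which is why column-parallel and row-parallel replacements must be paired the way the policy pairs them.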