mirror of https://github.com/hpcaitech/ColossalAI
[shardformer/sequence parallel] OPT does not support sequence parallelism; add warning and fix a bug in GPT2 pipeline parallelism (#4488)
parent 5545114fd8
commit 351351a36e
@@ -1,3 +1,4 @@
+import warnings
 from functools import partial
 from typing import Callable, Dict, List
 
@@ -39,6 +40,9 @@ class OPTPolicy(Policy):
         from transformers.models.opt.modeling_opt import OPTAttention, OPTDecoder, OPTDecoderLayer
 
         policy = {}
+        if self.shard_config.enable_sequence_parallelism:
+            self.shard_config.enable_sequence_parallelism = False
+            warnings.warn("OPT doesn't support sequence parallelism now, will ignore the sequence parallelism flag.")
 
         if self.shard_config.enable_tensor_parallelism:
             policy[OPTDecoder] = ModulePolicyDescription(sub_module_replacement=[
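For context, the guard this commit adds simply disables the unsupported flag and emits a warning rather than raising an error. Below is a minimal, runnable sketch of that pattern; the ShardConfig dataclass here is a hypothetical stand-in for colossalai.shardformer.ShardConfig, reduced to the two flags relevant to this change, so the snippet can run outside ColossalAI.

import warnings
from dataclasses import dataclass


@dataclass
class ShardConfig:
    # Hypothetical stand-in for colossalai.shardformer.ShardConfig,
    # kept to the flags touched by this commit.
    enable_tensor_parallelism: bool = True
    enable_sequence_parallelism: bool = False


def preprocess_opt_flags(shard_config: ShardConfig) -> ShardConfig:
    # Same guard as the commit: OPT has no sequence-parallel path yet,
    # so the flag is turned off with a warning instead of failing.
    if shard_config.enable_sequence_parallelism:
        shard_config.enable_sequence_parallelism = False
        warnings.warn(
            "OPT doesn't support sequence parallelism now, "
            "will ignore the sequence parallelism flag."
        )
    return shard_config


cfg = preprocess_opt_flags(ShardConfig(enable_sequence_parallelism=True))
assert cfg.enable_sequence_parallelism is False  # flag disabled, UserWarning emitted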