add rotary config in configuration_internlm.py

pull/470/head
YWMditto 2023-11-06 20:30:34 +08:00
parent 845cccd756
commit ec88e35306
1 changed file with 7 additions and 6 deletions

View File

@@ -19,9 +19,8 @@
# limitations under the License.
""" InternLM model configuration"""
from transformers.utils import logging
from transformers.configuration_utils import PretrainedConfig
from transformers.utils import logging
logger = logging.get_logger(__name__)
@@ -30,9 +29,9 @@ INTERNLM_PRETRAINED_CONFIG_ARCHIVE_MAP = {}
class InternLMConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`InternLMModel`]. It is used to instantiate an InternLM
model according to the specified arguments, defining the model architecture. Instantiating a configuration with the
defaults will yield a similar configuration to that of the InternLM-7B.
This is the configuration class to store the configuration of a [`InternLMModel`]. It is used to instantiate
an InternLM model according to the specified arguments, defining the model architecture. Instantiating a
configuration with the defaults will yield a similar configuration to that of the InternLM-7B.
Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
documentation from [`PretrainedConfig`] for more information.
@@ -81,7 +80,7 @@ class InternLMConfig(PretrainedConfig):
model_type = "internlm"
_auto_class = "AutoConfig"
def __init__(
def __init__( # pylint: disable=W0102
self,
vocab_size=103168,
hidden_size=4096,
@@ -98,6 +97,7 @@ class InternLMConfig(PretrainedConfig):
eos_token_id=2,
tie_word_embeddings=False,
bias=True,
rotary={"base": 10000, "type": "dynamic"}, # pylint: disable=W0102
**kwargs,
):
self.vocab_size = vocab_size
@@ -111,6 +111,7 @@ class InternLMConfig(PretrainedConfig):
self.rms_norm_eps = rms_norm_eps
self.use_cache = use_cache
self.bias = bias
self.rotary = rotary
super().__init__(
pad_token_id=pad_token_id,
bos_token_id=bos_token_id,