CSS-LM

configuration_bart.py · 148 lines · 5.6 KB
# coding=utf-8
# Copyright 2020 The Fairseq Authors and The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" BART configuration """


import logging

from .configuration_utils import PretrainedConfig


logger = logging.getLogger(__name__)

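# Shortcut model identifiers mapped to the (legacy S3) URL of each model's
# config.json; at this vintage of the library, `from_pretrained` resolved the
# names below through maps like this one.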
BART_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "facebook/bart-base": "https://s3.amazonaws.com/models.huggingface.co/bert/facebook/bart-base/config.json",
    "facebook/bart-large": "https://s3.amazonaws.com/models.huggingface.co/bert/facebook/bart-large/config.json",
    "facebook/bart-large-mnli": "https://s3.amazonaws.com/models.huggingface.co/bert/facebook/bart-large-mnli/config.json",
    "facebook/bart-large-cnn": "https://s3.amazonaws.com/models.huggingface.co/bert/facebook/bart-large-cnn/config.json",
    "facebook/bart-large-xsum": "https://s3.amazonaws.com/models.huggingface.co/bert/facebook/bart-large-xsum/config.json",
    "facebook/mbart-large-en-ro": "https://s3.amazonaws.com/models.huggingface.co/bert/facebook/mbart-large-en-ro/config.json",
    "yjernite/bart_eli5": "https://s3.amazonaws.com/models.huggingface.co/bert/yjernite/bart_eli5/config.json",
}


class BartConfig(PretrainedConfig):
    r"""
        Configuration class for Bart. Parameters are renamed from the fairseq
        implementation; the defaults correspond to the `facebook/bart-large`
        architecture.
    """
    model_type = "bart"

    def __init__(
        self,
        activation_dropout=0.0,
        extra_pos_embeddings=2,
        activation_function="gelu",
        vocab_size=50265,
        d_model=1024,
        encoder_ffn_dim=4096,
        encoder_layers=12,
        encoder_attention_heads=16,
        decoder_ffn_dim=4096,
        decoder_layers=12,
        decoder_attention_heads=16,
        encoder_layerdrop=0.0,
        decoder_layerdrop=0.0,
        attention_dropout=0.0,
        dropout=0.1,
        max_position_embeddings=1024,
        init_std=0.02,
        classifier_dropout=0.0,
        num_labels=3,
        is_encoder_decoder=True,
        pad_token_id=1,
        bos_token_id=0,
        eos_token_id=2,
        normalize_before=False,
        add_final_layer_norm=False,
        scale_embedding=False,
        normalize_embedding=True,
        static_position_embeddings=False,
        add_bias_logits=False,
        **common_kwargs
    ):
        r"""
            :class:`~transformers.BartConfig` is the configuration class for `BartModel`.

            Examples::

                >>> from transformers import BartConfig, BartModel

                >>> config = BartConfig.from_pretrained('facebook/bart-large')
                >>> model = BartModel(config)
        """
        if "hidden_size" in common_kwargs:
            raise ValueError("BartConfig has no `hidden_size` parameter; the hidden size is called `d_model`.")
        super().__init__(
            num_labels=num_labels,
            pad_token_id=pad_token_id,
            bos_token_id=bos_token_id,
            eos_token_id=eos_token_id,
            is_encoder_decoder=is_encoder_decoder,
            **common_kwargs,
        )
        self.vocab_size = vocab_size
        self.d_model = d_model  # encoder_embed_dim and decoder_embed_dim
        self.encoder_ffn_dim = encoder_ffn_dim
        self.encoder_layers = self.num_hidden_layers = encoder_layers
        self.encoder_attention_heads = encoder_attention_heads
        self.encoder_layerdrop = encoder_layerdrop
        self.decoder_layerdrop = decoder_layerdrop
        self.decoder_ffn_dim = decoder_ffn_dim
        self.decoder_layers = decoder_layers
        self.decoder_attention_heads = decoder_attention_heads
        self.max_position_embeddings = max_position_embeddings
        self.init_std = init_std  # Normal(0, this parameter)
        self.activation_function = activation_function

        # Params introduced for Mbart
        self.scale_embedding = scale_embedding  # scale factor will be sqrt(d_model) if True
        self.normalize_embedding = normalize_embedding  # True for mbart, False otherwise
        self.normalize_before = normalize_before  # combo of fairseq's encoder_ and decoder_normalize_before
        self.add_final_layer_norm = add_final_layer_norm

        # Params introduced for Marian
        self.add_bias_logits = add_bias_logits
        self.static_position_embeddings = static_position_embeddings

        # 3 Types of Dropout
        self.attention_dropout = attention_dropout
        self.activation_dropout = activation_dropout
        self.dropout = dropout

        # Classifier stuff
        self.classif_dropout = classifier_dropout

        # pos embedding offset
        self.extra_pos_embeddings = self.pad_token_id + 1
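        # Note: the `extra_pos_embeddings` constructor argument is overridden
        # above (as in upstream transformers): fairseq-style learned position
        # embeddings start counting at pad_token_id + 1, so the offset follows
        # from the pad token (2 for the standard BART vocab, where pad_token_id == 1).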

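    # Read-only aliases so generic code written against BERT-style configs
    # (`num_attention_heads`, `hidden_size`) works with BART's parameter names.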
    @property
    def num_attention_heads(self) -> int:
        return self.encoder_attention_heads

    @property
    def hidden_size(self) -> int:
        return self.d_model

    def is_valid_mbart(self) -> bool:
        """Is the configuration aligned with the MBART paper?"""
        if self.normalize_before and self.add_final_layer_norm and self.scale_embedding:
            return True
        if self.normalize_before or self.add_final_layer_norm or self.scale_embedding:
            logger.info("This configuration is a mixture of MBART and BART settings")
        return False


class MBartConfig(BartConfig):
    model_type = "mbart"
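

# Usage sketch (illustrative only; assumes the module is imported as part of a
# transformers-style package, as in the docstring example above):
#
#     from transformers import BartConfig, MBartConfig
#
#     # A smaller-than-default BART; unspecified fields keep the bart-large
#     # defaults defined above.
#     config = BartConfig(encoder_layers=6, decoder_layers=6, d_model=512)
#     assert config.hidden_size == config.d_model      # property alias
#     assert config.num_attention_heads == 16          # encoder heads default
#
#     # mBART flips the three pre-norm/scaling flags that is_valid_mbart checks.
#     mbart = MBartConfig(
#         normalize_before=True, add_final_layer_norm=True, scale_embedding=True
#     )
#     assert mbart.is_valid_mbart()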