# coding=utf-8
# Copyright 2020 The Allen Institute for AI team and The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Longformer configuration """

import logging
from typing import List, Union

from .configuration_roberta import RobertaConfig


logger = logging.getLogger(__name__)

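# Shortcut names of the released Longformer checkpoints, mapped to the URLs
# where their configuration files are hosted.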
LONGFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "allenai/longformer-base-4096": "https://s3.amazonaws.com/models.huggingface.co/bert/allenai/longformer-base-4096/config.json",
    "allenai/longformer-large-4096": "https://s3.amazonaws.com/models.huggingface.co/bert/allenai/longformer-large-4096/config.json",
    "allenai/longformer-large-4096-finetuned-triviaqa": "https://s3.amazonaws.com/models.huggingface.co/bert/allenai/longformer-large-4096-finetuned-triviaqa/config.json",
    "allenai/longformer-base-4096-extra.pos.embd.only": "https://s3.amazonaws.com/models.huggingface.co/bert/allenai/longformer-base-4096-extra.pos.embd.only/config.json",
    "allenai/longformer-large-4096-extra.pos.embd.only": "https://s3.amazonaws.com/models.huggingface.co/bert/allenai/longformer-large-4096-extra.pos.embd.only/config.json",
}


class LongformerConfig(RobertaConfig):
35r"""
36This is the configuration class to store the configuration of a :class:`~transformers.LongformerModel`.
37It is used to instantiate an Longformer model according to the specified arguments, defining the model
38architecture. Instantiating a configuration with the defaults will yield a similar configuration to that of
39the RoBERTa `roberta-base <https://huggingface.co/roberta-base>`__ architecture with a sequence length 4,096.
40
41The :class:`~transformers.LongformerConfig` class directly inherits :class:`~transformers.RobertaConfig`.
42It reuses the same defaults. Please check the parent class for more information.
43
44Args:
45attention_window (:obj:`int` or :obj:`List[int]`, optional, defaults to 512):
46Size of an attention window around each token. If :obj:`int`, use the same size for all layers.
47To specify a different window size for each layer, use a :obj:`List[int]` where
48``len(attention_window) == num_hidden_layers``.
49
50Example::
51
52>>> from transformers import LongformerConfig, LongformerModel
53
54>>> # Initializing a Longformer configuration
55>>> configuration = LongformerConfig()
56
57>>> # Initializing a model from the configuration
58>>> model = LongformerModel(configuration)
59
60>>> # Accessing the model configuration
61>>> configuration = model.config
62"""
    model_type = "longformer"

    def __init__(self, attention_window: Union[List[int], int] = 512, sep_token_id: int = 2, **kwargs):
        super().__init__(**kwargs)
        self.attention_window = attention_window
        self.sep_token_id = sep_token_id
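
# Illustrative usage sketch (not part of the upstream file): it assumes the
# module is run inside its package so the relative import above resolves, and
# that ``num_hidden_layers`` keeps the ``roberta-base`` default of 12. It
# shows the two ways ``attention_window`` can be specified.
if __name__ == "__main__":
    # One shared 512-token window for every layer (the default).
    uniform = LongformerConfig(attention_window=512)

    # Per-layer windows: narrower local attention in the lower half of the
    # stack, the full 512-token window in the upper half.
    per_layer = LongformerConfig(attention_window=[64] * 6 + [512] * 6)
    assert len(per_layer.attention_window) == per_layer.num_hidden_layers

    # ``sep_token_id`` defaults to 2, the </s> token in RoBERTa vocabularies.
    print(uniform.sep_token_id, per_layer.attention_window)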