#!/usr/bin/env python
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Usage:
# ./gen-card-allenai-wmt19.py

import os
from pathlib import Path


def write_model_card(model_card_dir, src_lang, tgt_lang, model_name):
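    """Write the README.md model card for one ported allenai FSMT checkpoint.

    src_lang/tgt_lang select the YAML language tags and the sample sentence;
    model_name selects the Hub repo name and the BLEU score row.
    """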
    texts = {
        "en": "Machine learning is great, isn't it?",
        "ru": "Машинное обучение - это здорово, не так ли?",
        "de": "Maschinelles Lernen ist großartig, nicht wahr?",
    }

    # BLEU scores as follows:
    # "pair": [fairseq, transformers]
    scores = {
        "wmt19-de-en-6-6-base": [0, 38.37],
        "wmt19-de-en-6-6-big": [0, 39.90],
    }
    pair = f"{src_lang}-{tgt_lang}"

    readme = f"""
---

language:
- {src_lang}
- {tgt_lang}
thumbnail:
tags:
- translation
- wmt19
- allenai
license: apache-2.0
datasets:
- wmt19
metrics:
- bleu
---

# FSMT

## Model description

This is a ported version of the fairseq-based [wmt19 transformer](https://github.com/jungokasai/deep-shallow/) for {src_lang}-{tgt_lang}.

For more details, please see [Deep Encoder, Shallow Decoder: Reevaluating the Speed-Quality Tradeoff in Machine Translation](https://arxiv.org/abs/2006.10369).

Two models are available:

* [wmt19-de-en-6-6-big](https://huggingface.co/allenai/wmt19-de-en-6-6-big)
* [wmt19-de-en-6-6-base](https://huggingface.co/allenai/wmt19-de-en-6-6-base)


## Intended uses & limitations

#### How to use

```python
from transformers import FSMTForConditionalGeneration, FSMTTokenizer

mname = "allenai/{model_name}"
tokenizer = FSMTTokenizer.from_pretrained(mname)
model = FSMTForConditionalGeneration.from_pretrained(mname)

input = "{texts[src_lang]}"
input_ids = tokenizer.encode(input, return_tensors="pt")
outputs = model.generate(input_ids)
decoded = tokenizer.decode(outputs[0], skip_special_tokens=True)
print(decoded)  # {texts[tgt_lang]}
```

#### Limitations and bias


## Training data

Pretrained weights were left identical to the original model released by allenai. For more details, please see the [paper](https://arxiv.org/abs/2006.10369).

## Eval results

Here are the BLEU scores:

model   |  transformers
-------|---------
{model_name}  |  {scores[model_name][1]}

The score was calculated using this code:

```bash
git clone https://github.com/huggingface/transformers
cd transformers
export PAIR={pair}
export DATA_DIR=data/$PAIR
export SAVE_DIR=data/$PAIR
export BS=8
export NUM_BEAMS=5
mkdir -p $DATA_DIR
sacrebleu -t wmt19 -l $PAIR --echo src > $DATA_DIR/val.source
sacrebleu -t wmt19 -l $PAIR --echo ref > $DATA_DIR/val.target
echo $PAIR
PYTHONPATH="src:examples/seq2seq" python examples/seq2seq/run_eval.py allenai/{model_name} $DATA_DIR/val.source $SAVE_DIR/test_translations.txt --reference_path $DATA_DIR/val.target --score_path $SAVE_DIR/test_bleu.json --bs $BS --task translation --num_beams $NUM_BEAMS
```

## Data Sources

- [training, etc.](http://www.statmt.org/wmt19/)
- [test set](http://matrix.statmt.org/test_sets/newstest2019.tgz?1556572561)


### BibTeX entry and citation info

```
@misc{{kasai2020deep,
    title={{Deep Encoder, Shallow Decoder: Reevaluating the Speed-Quality Tradeoff in Machine Translation}},
    author={{Jungo Kasai and Nikolaos Pappas and Hao Peng and James Cross and Noah A. Smith}},
    year={{2020}},
    eprint={{2006.10369}},
    archivePrefix={{arXiv}},
    primaryClass={{cs.CL}}
}}
```

"""
    model_card_dir.mkdir(parents=True, exist_ok=True)
    path = os.path.join(model_card_dir, "README.md")
    print(f"Generating {path}")
    with open(path, "w", encoding="utf-8") as f:
        f.write(readme)


# locate the repo root: this script is expected to sit two directories below it
repo_dir = Path(__file__).resolve().parent.parent.parent
model_cards_dir = repo_dir / "model_cards"

for model_name in ["wmt19-de-en-6-6-base", "wmt19-de-en-6-6-big"]:
    model_card_dir = model_cards_dir / "allenai" / model_name
    write_model_card(model_card_dir, src_lang="de", tgt_lang="en", model_name=model_name)
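
# A minimal smoke test, offered as a sketch rather than as part of the original
# generator: render one card into a temporary directory and check that the
# template filled in as expected. The GEN_CARD_SMOKE_TEST environment variable
# is a hypothetical opt-in switch, not an established flag.
if os.environ.get("GEN_CARD_SMOKE_TEST"):
    import tempfile

    with tempfile.TemporaryDirectory() as tmp:
        card_dir = Path(tmp) / "allenai" / "wmt19-de-en-6-6-base"
        write_model_card(card_dir, src_lang="de", tgt_lang="en", model_name="wmt19-de-en-6-6-base")
        card = (card_dir / "README.md").read_text(encoding="utf-8")
        assert "# FSMT" in card and "38.37" in card, "rendered card is missing expected content"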
