Commit 0fcb40e: Initial commit
Gabriele Sarti committed
Parent(s): 8029b54

Files changed:
- README.md +25 -3
- config.json +45 -0
- opus+bt.spm32k-spm32k.transformer-align.train1.log +1073 -0
- opus+bt.spm32k-spm32k.transformer-align.valid1.log +35 -0
- pytorch_model.bin +3 -0
- special_tokens_map.json +5 -0
- tokenizer_config.json +13 -0
- vocab.json +0 -0
README.md
CHANGED
@@ -1,3 +1,25 @@
# opus+bt-2021-04-14.zip

* dataset: opus+bt
* model: transformer-align
* source language(s): eng
* target language(s): rus
* pre-processing: normalization + SentencePiece (spm32k,spm32k)
* download: [opus+bt-2021-04-14.zip](https://object.pouta.csc.fi/Tatoeba-MT-models/eng-rus/opus+bt-2021-04-14.zip)
* test set translations: [opus+bt-2021-04-14.test.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/eng-rus/opus+bt-2021-04-14.test.txt)
* test set scores: [opus+bt-2021-04-14.eval.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/eng-rus/opus+bt-2021-04-14.eval.txt)
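
A minimal usage sketch with the Hugging Face `transformers` MarianMT classes. The checkpoint location below is an assumption (a local clone of this repository); substitute the actual path or Hub identifier, and note that the SentencePiece models referenced by the tokenizer config must be available alongside the files added in this commit.

```python
from transformers import MarianMTModel, MarianTokenizer

# Assumed checkpoint location: a local clone of this repository containing
# config.json, pytorch_model.bin and the tokenizer files added in this commit.
model_dir = "."

tokenizer = MarianTokenizer.from_pretrained(model_dir)
model = MarianMTModel.from_pretrained(model_dir)

# English -> Russian translation with the generation defaults from config.json
# (beam size 6, max length 512).
batch = tokenizer(["How are you today?"], return_tensors="pt", padding=True)
generated = model.generate(**batch)
print(tokenizer.batch_decode(generated, skip_special_tokens=True))
```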

## Benchmarks

| testset | BLEU | chr-F | #sent | #words | BP |
|---------|-------|-------|-------|--------|----|
| newstest2012.eng-rus | 31.2 | 0.588 | 3003 | 64830 | 0.980 |
| newstest2013.eng-rus | 23.2 | 0.518 | 3000 | 58560 | 0.974 |
| newstest2015-enru.eng-rus | 29.1 | 0.581 | 2818 | 55915 | 1.000 |
| newstest2016-enru.eng-rus | 27.4 | 0.565 | 2998 | 62018 | 0.993 |
| newstest2017-enru.eng-rus | 30.8 | 0.592 | 3001 | 60255 | 0.998 |
| newstest2018-enru.eng-rus | 27.4 | 0.572 | 3000 | 61920 | 1.000 |
| newstest2019-enru.eng-rus | 27.1 | 0.540 | 1997 | 48153 | 0.927 |
| Tatoeba-test.eng-rus | 45.6 | 0.652 | 10000 | 66872 | 0.987 |
| tico19-test.eng-rus | 27.5 | 0.556 | 2100 | 55837 | 0.927 |
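
The BLEU and chr-F figures above can in principle be recomputed from the released test set translations with `sacrebleu`. A minimal sketch, assuming hypotheses and references have been extracted to the hypothetical files `hyp.txt` and `ref.txt`, one detokenized sentence per line:

```python
import sacrebleu

# Hypothetical file names: hypotheses taken from opus+bt-2021-04-14.test.txt,
# references from the corresponding test set, one sentence per line.
with open("hyp.txt", encoding="utf-8") as f:
    hypotheses = [line.rstrip("\n") for line in f]
with open("ref.txt", encoding="utf-8") as f:
    references = [line.rstrip("\n") for line in f]

bleu = sacrebleu.corpus_bleu(hypotheses, [references])
chrf = sacrebleu.corpus_chrf(hypotheses, [references])
# sacrebleu reports chrF on a 0-100 scale; the table above uses 0-1.
print(f"BLEU  = {bleu.score:.1f}")
print(f"chr-F = {chrf.score / 100:.3f}")
```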
config.json
ADDED
@@ -0,0 +1,45 @@
{
  "activation_dropout": 0.0,
  "activation_function": "swish",
  "architectures": [
    "MarianMTModel"
  ],
  "attention_dropout": 0.0,
  "bad_words_ids": [
    [
      60878
    ]
  ],
  "bos_token_id": 0,
  "classifier_dropout": 0.0,
  "d_model": 512,
  "decoder_attention_heads": 8,
  "decoder_ffn_dim": 2048,
  "decoder_layerdrop": 0.0,
  "decoder_layers": 6,
  "decoder_start_token_id": 60878,
  "decoder_vocab_size": 60879,
  "dropout": 0.1,
  "encoder_attention_heads": 8,
  "encoder_ffn_dim": 2048,
  "encoder_layerdrop": 0.0,
  "encoder_layers": 6,
  "eos_token_id": 25377,
  "forced_eos_token_id": 25377,
  "init_std": 0.02,
  "is_encoder_decoder": true,
  "max_length": 512,
  "max_position_embeddings": 512,
  "model_type": "marian",
  "normalize_embedding": false,
  "num_beams": 6,
  "num_hidden_layers": 6,
  "pad_token_id": 60878,
  "scale_embedding": true,
  "share_encoder_decoder_embeddings": true,
  "static_position_embeddings": true,
  "torch_dtype": "float16",
  "transformers_version": "4.21.0.dev0",
  "use_cache": true,
  "vocab_size": 60879
}
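
The configuration can be inspected directly with `transformers`. A minimal sketch, assuming a local copy of the `config.json` added in this commit:

```python
from transformers import MarianConfig

# Assumed path: the config.json file from this commit in the current directory.
config = MarianConfig.from_json_file("config.json")

print(config.model_type)   # "marian"
print(config.vocab_size)   # 60879, shared source/target SentencePiece vocabulary
# The pad token id (60878) doubles as the decoder start token id.
print(config.pad_token_id, config.decoder_start_token_id)
print(config.num_beams)    # 6, the default beam size used at generation time
```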
opus+bt.spm32k-spm32k.transformer-align.train1.log
ADDED
@@ -0,0 +1,1073 @@
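
The training log below interleaves the full Marian configuration dump with checkpoint and validation entries such as `[valid] Ep. 1 : Up. 10000 : perplexity : 6.61571 : new best`. A minimal sketch, assuming a local copy of the log file, for extracting the validation perplexity recorded at each 10,000-update checkpoint:

```python
import re

# Match Marian validation lines of the form
#   [2021-03-29 00:40:17] [valid] Ep. 1 : Up. 10000 : perplexity : 6.61571 : new best
pattern = re.compile(r"\[valid\].*?Up\. (\d+) : perplexity : ([\d.]+)")

checkpoints = []
with open("opus+bt.spm32k-spm32k.transformer-align.train1.log", encoding="utf-8") as log:
    for line in log:
        match = pattern.search(line)
        if match:
            checkpoints.append((int(match.group(1)), float(match.group(2))))

for update, ppl in checkpoints:
    print(f"update {update:>7}: perplexity {ppl:.5f}")
```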
1 |
+
[2021-03-28 22:24:02] [marian] Marian v1.10.0 6f6d484 2021-02-06 15:35:16 -0800
|
2 |
+
[2021-03-28 22:24:02] [marian] Running on r18g02.bullx as process 135320 with command line:
|
3 |
+
[2021-03-28 22:24:02] [marian] /projappl/project_2001194/marian/build/marian --guided-alignment /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/train/opus+bt.spm32k-spm32k.src-trg.alg.gz --early-stopping 15 --valid-freq 10000 --valid-sets /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/val/Tatoeba-dev.src.spm32k /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/val/Tatoeba-dev.trg.spm32k --valid-metrics perplexity --valid-mini-batch 16 --valid-log /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.valid1.log --beam-size 12 --normalize 1 --allow-unk --overwrite --keep-best --model /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz --type transformer --train-sets /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/train/opus+bt.src.clean.spm32k.gz /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/train/opus+bt.trg.clean.spm32k.gz --max-length 500 --vocabs /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.vocab.yml /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.vocab.yml --mini-batch-fit -w 24000 --maxi-batch 500 --save-freq 10000 --disp-freq 10000 --log /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.train1.log --enc-depth 6 --dec-depth 6 --transformer-heads 8 --transformer-postprocess-emb d --transformer-postprocess dan --transformer-dropout 0.1 --label-smoothing 0.1 --learn-rate 0.0003 --lr-warmup 16000 --lr-decay-inv-sqrt 16000 --lr-report --optimizer-params 0.9 0.98 1e-09 --clip-norm 5 --fp16 --tied-embeddings-all --devices 0 1 2 3 --sync-sgd --seed 1111 --sqlite --tempdir /run/nvme/job_5338591/data --exponential-smoothing
|
4 |
+
[2021-03-28 22:24:02] [config] after: 0e
|
5 |
+
[2021-03-28 22:24:02] [config] after-batches: 0
|
6 |
+
[2021-03-28 22:24:02] [config] after-epochs: 0
|
7 |
+
[2021-03-28 22:24:02] [config] all-caps-every: 0
|
8 |
+
[2021-03-28 22:24:02] [config] allow-unk: true
|
9 |
+
[2021-03-28 22:24:02] [config] authors: false
|
10 |
+
[2021-03-28 22:24:02] [config] beam-size: 12
|
11 |
+
[2021-03-28 22:24:02] [config] bert-class-symbol: "[CLS]"
|
12 |
+
[2021-03-28 22:24:02] [config] bert-mask-symbol: "[MASK]"
|
13 |
+
[2021-03-28 22:24:02] [config] bert-masking-fraction: 0.15
|
14 |
+
[2021-03-28 22:24:02] [config] bert-sep-symbol: "[SEP]"
|
15 |
+
[2021-03-28 22:24:02] [config] bert-train-type-embeddings: true
|
16 |
+
[2021-03-28 22:24:02] [config] bert-type-vocab-size: 2
|
17 |
+
[2021-03-28 22:24:02] [config] build-info: ""
|
18 |
+
[2021-03-28 22:24:02] [config] cite: false
|
19 |
+
[2021-03-28 22:24:02] [config] clip-norm: 5
|
20 |
+
[2021-03-28 22:24:02] [config] cost-scaling:
|
21 |
+
[2021-03-28 22:24:02] [config] - 7
|
22 |
+
[2021-03-28 22:24:02] [config] - 2000
|
23 |
+
[2021-03-28 22:24:02] [config] - 2
|
24 |
+
[2021-03-28 22:24:02] [config] - 0.05
|
25 |
+
[2021-03-28 22:24:02] [config] - 10
|
26 |
+
[2021-03-28 22:24:02] [config] - 1
|
27 |
+
[2021-03-28 22:24:02] [config] cost-type: ce-sum
|
28 |
+
[2021-03-28 22:24:02] [config] cpu-threads: 0
|
29 |
+
[2021-03-28 22:24:02] [config] data-weighting: ""
|
30 |
+
[2021-03-28 22:24:02] [config] data-weighting-type: sentence
|
31 |
+
[2021-03-28 22:24:02] [config] dec-cell: gru
|
32 |
+
[2021-03-28 22:24:02] [config] dec-cell-base-depth: 2
|
33 |
+
[2021-03-28 22:24:02] [config] dec-cell-high-depth: 1
|
34 |
+
[2021-03-28 22:24:02] [config] dec-depth: 6
|
35 |
+
[2021-03-28 22:24:02] [config] devices:
|
36 |
+
[2021-03-28 22:24:02] [config] - 0
|
37 |
+
[2021-03-28 22:24:02] [config] - 1
|
38 |
+
[2021-03-28 22:24:02] [config] - 2
|
39 |
+
[2021-03-28 22:24:02] [config] - 3
|
40 |
+
[2021-03-28 22:24:02] [config] dim-emb: 512
|
41 |
+
[2021-03-28 22:24:02] [config] dim-rnn: 1024
|
42 |
+
[2021-03-28 22:24:02] [config] dim-vocabs:
|
43 |
+
[2021-03-28 22:24:02] [config] - 0
|
44 |
+
[2021-03-28 22:24:02] [config] - 0
|
45 |
+
[2021-03-28 22:24:02] [config] disp-first: 0
|
46 |
+
[2021-03-28 22:24:02] [config] disp-freq: 10000
|
47 |
+
[2021-03-28 22:24:02] [config] disp-label-counts: true
|
48 |
+
[2021-03-28 22:24:02] [config] dropout-rnn: 0
|
49 |
+
[2021-03-28 22:24:02] [config] dropout-src: 0
|
50 |
+
[2021-03-28 22:24:02] [config] dropout-trg: 0
|
51 |
+
[2021-03-28 22:24:02] [config] dump-config: ""
|
52 |
+
[2021-03-28 22:24:02] [config] early-stopping: 15
|
53 |
+
[2021-03-28 22:24:02] [config] embedding-fix-src: false
|
54 |
+
[2021-03-28 22:24:02] [config] embedding-fix-trg: false
|
55 |
+
[2021-03-28 22:24:02] [config] embedding-normalization: false
|
56 |
+
[2021-03-28 22:24:02] [config] embedding-vectors:
|
57 |
+
[2021-03-28 22:24:02] [config] []
|
58 |
+
[2021-03-28 22:24:02] [config] enc-cell: gru
|
59 |
+
[2021-03-28 22:24:02] [config] enc-cell-depth: 1
|
60 |
+
[2021-03-28 22:24:02] [config] enc-depth: 6
|
61 |
+
[2021-03-28 22:24:02] [config] enc-type: bidirectional
|
62 |
+
[2021-03-28 22:24:02] [config] english-title-case-every: 0
|
63 |
+
[2021-03-28 22:24:02] [config] exponential-smoothing: 0.0001
|
64 |
+
[2021-03-28 22:24:02] [config] factor-weight: 1
|
65 |
+
[2021-03-28 22:24:02] [config] grad-dropping-momentum: 0
|
66 |
+
[2021-03-28 22:24:02] [config] grad-dropping-rate: 0
|
67 |
+
[2021-03-28 22:24:02] [config] grad-dropping-warmup: 100
|
68 |
+
[2021-03-28 22:24:02] [config] gradient-checkpointing: false
|
69 |
+
[2021-03-28 22:24:02] [config] guided-alignment: /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/train/opus+bt.spm32k-spm32k.src-trg.alg.gz
|
70 |
+
[2021-03-28 22:24:02] [config] guided-alignment-cost: mse
|
71 |
+
[2021-03-28 22:24:02] [config] guided-alignment-weight: 0.1
|
72 |
+
[2021-03-28 22:24:02] [config] ignore-model-config: false
|
73 |
+
[2021-03-28 22:24:02] [config] input-types:
|
74 |
+
[2021-03-28 22:24:02] [config] []
|
75 |
+
[2021-03-28 22:24:02] [config] interpolate-env-vars: false
|
76 |
+
[2021-03-28 22:24:02] [config] keep-best: true
|
77 |
+
[2021-03-28 22:24:02] [config] label-smoothing: 0.1
|
78 |
+
[2021-03-28 22:24:02] [config] layer-normalization: false
|
79 |
+
[2021-03-28 22:24:02] [config] learn-rate: 0.0003
|
80 |
+
[2021-03-28 22:24:02] [config] lemma-dim-emb: 0
|
81 |
+
[2021-03-28 22:24:02] [config] log: /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.train1.log
|
82 |
+
[2021-03-28 22:24:02] [config] log-level: info
|
83 |
+
[2021-03-28 22:24:02] [config] log-time-zone: ""
|
84 |
+
[2021-03-28 22:24:02] [config] logical-epoch:
|
85 |
+
[2021-03-28 22:24:02] [config] - 1e
|
86 |
+
[2021-03-28 22:24:02] [config] - 0
|
87 |
+
[2021-03-28 22:24:02] [config] lr-decay: 0
|
88 |
+
[2021-03-28 22:24:02] [config] lr-decay-freq: 50000
|
89 |
+
[2021-03-28 22:24:02] [config] lr-decay-inv-sqrt:
|
90 |
+
[2021-03-28 22:24:02] [config] - 16000
|
91 |
+
[2021-03-28 22:24:02] [config] lr-decay-repeat-warmup: false
|
92 |
+
[2021-03-28 22:24:02] [config] lr-decay-reset-optimizer: false
|
93 |
+
[2021-03-28 22:24:02] [config] lr-decay-start:
|
94 |
+
[2021-03-28 22:24:02] [config] - 10
|
95 |
+
[2021-03-28 22:24:02] [config] - 1
|
96 |
+
[2021-03-28 22:24:02] [config] lr-decay-strategy: epoch+stalled
|
97 |
+
[2021-03-28 22:24:02] [config] lr-report: true
|
98 |
+
[2021-03-28 22:24:02] [config] lr-warmup: 16000
|
99 |
+
[2021-03-28 22:24:02] [config] lr-warmup-at-reload: false
|
100 |
+
[2021-03-28 22:24:02] [config] lr-warmup-cycle: false
|
101 |
+
[2021-03-28 22:24:02] [config] lr-warmup-start-rate: 0
|
102 |
+
[2021-03-28 22:24:02] [config] max-length: 500
|
103 |
+
[2021-03-28 22:24:02] [config] max-length-crop: false
|
104 |
+
[2021-03-28 22:24:02] [config] max-length-factor: 3
|
105 |
+
[2021-03-28 22:24:02] [config] maxi-batch: 500
|
106 |
+
[2021-03-28 22:24:02] [config] maxi-batch-sort: trg
|
107 |
+
[2021-03-28 22:24:02] [config] mini-batch: 64
|
108 |
+
[2021-03-28 22:24:02] [config] mini-batch-fit: true
|
109 |
+
[2021-03-28 22:24:02] [config] mini-batch-fit-step: 10
|
110 |
+
[2021-03-28 22:24:02] [config] mini-batch-track-lr: false
|
111 |
+
[2021-03-28 22:24:02] [config] mini-batch-warmup: 0
|
112 |
+
[2021-03-28 22:24:02] [config] mini-batch-words: 0
|
113 |
+
[2021-03-28 22:24:02] [config] mini-batch-words-ref: 0
|
114 |
+
[2021-03-28 22:24:02] [config] model: /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
115 |
+
[2021-03-28 22:24:02] [config] multi-loss-type: sum
|
116 |
+
[2021-03-28 22:24:02] [config] multi-node: false
|
117 |
+
[2021-03-28 22:24:02] [config] multi-node-overlap: true
|
118 |
+
[2021-03-28 22:24:02] [config] n-best: false
|
119 |
+
[2021-03-28 22:24:02] [config] no-nccl: false
|
120 |
+
[2021-03-28 22:24:02] [config] no-reload: false
|
121 |
+
[2021-03-28 22:24:02] [config] no-restore-corpus: false
|
122 |
+
[2021-03-28 22:24:02] [config] normalize: 1
|
123 |
+
[2021-03-28 22:24:02] [config] normalize-gradient: false
|
124 |
+
[2021-03-28 22:24:02] [config] num-devices: 0
|
125 |
+
[2021-03-28 22:24:02] [config] optimizer: adam
|
126 |
+
[2021-03-28 22:24:02] [config] optimizer-delay: 1
|
127 |
+
[2021-03-28 22:24:02] [config] optimizer-params:
|
128 |
+
[2021-03-28 22:24:02] [config] - 0.9
|
129 |
+
[2021-03-28 22:24:02] [config] - 0.98
|
130 |
+
[2021-03-28 22:24:02] [config] - 1e-09
|
131 |
+
[2021-03-28 22:24:02] [config] output-omit-bias: false
|
132 |
+
[2021-03-28 22:24:02] [config] overwrite: true
|
133 |
+
[2021-03-28 22:24:02] [config] precision:
|
134 |
+
[2021-03-28 22:24:02] [config] - float16
|
135 |
+
[2021-03-28 22:24:02] [config] - float32
|
136 |
+
[2021-03-28 22:24:02] [config] - float32
|
137 |
+
[2021-03-28 22:24:02] [config] pretrained-model: ""
|
138 |
+
[2021-03-28 22:24:02] [config] quantize-biases: false
|
139 |
+
[2021-03-28 22:24:02] [config] quantize-bits: 0
|
140 |
+
[2021-03-28 22:24:02] [config] quantize-log-based: false
|
141 |
+
[2021-03-28 22:24:02] [config] quantize-optimization-steps: 0
|
142 |
+
[2021-03-28 22:24:02] [config] quiet: false
|
143 |
+
[2021-03-28 22:24:02] [config] quiet-translation: false
|
144 |
+
[2021-03-28 22:24:02] [config] relative-paths: false
|
145 |
+
[2021-03-28 22:24:02] [config] right-left: false
|
146 |
+
[2021-03-28 22:24:02] [config] save-freq: 10000
|
147 |
+
[2021-03-28 22:24:02] [config] seed: 1111
|
148 |
+
[2021-03-28 22:24:02] [config] sentencepiece-alphas:
|
149 |
+
[2021-03-28 22:24:02] [config] []
|
150 |
+
[2021-03-28 22:24:02] [config] sentencepiece-max-lines: 2000000
|
151 |
+
[2021-03-28 22:24:02] [config] sentencepiece-options: ""
|
152 |
+
[2021-03-28 22:24:02] [config] shuffle: data
|
153 |
+
[2021-03-28 22:24:02] [config] shuffle-in-ram: false
|
154 |
+
[2021-03-28 22:24:02] [config] sigterm: save-and-exit
|
155 |
+
[2021-03-28 22:24:02] [config] skip: false
|
156 |
+
[2021-03-28 22:24:02] [config] sqlite: temporary
|
157 |
+
[2021-03-28 22:24:02] [config] sqlite-drop: false
|
158 |
+
[2021-03-28 22:24:02] [config] sync-sgd: true
|
159 |
+
[2021-03-28 22:24:02] [config] tempdir: /run/nvme/job_5338591/data
|
160 |
+
[2021-03-28 22:24:02] [config] tied-embeddings: false
|
161 |
+
[2021-03-28 22:24:02] [config] tied-embeddings-all: true
|
162 |
+
[2021-03-28 22:24:02] [config] tied-embeddings-src: false
|
163 |
+
[2021-03-28 22:24:02] [config] train-embedder-rank:
|
164 |
+
[2021-03-28 22:24:02] [config] []
|
165 |
+
[2021-03-28 22:24:02] [config] train-sets:
|
166 |
+
[2021-03-28 22:24:02] [config] - /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/train/opus+bt.src.clean.spm32k.gz
|
167 |
+
[2021-03-28 22:24:02] [config] - /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/train/opus+bt.trg.clean.spm32k.gz
|
168 |
+
[2021-03-28 22:24:02] [config] transformer-aan-activation: swish
|
169 |
+
[2021-03-28 22:24:02] [config] transformer-aan-depth: 2
|
170 |
+
[2021-03-28 22:24:02] [config] transformer-aan-nogate: false
|
171 |
+
[2021-03-28 22:24:02] [config] transformer-decoder-autoreg: self-attention
|
172 |
+
[2021-03-28 22:24:02] [config] transformer-depth-scaling: false
|
173 |
+
[2021-03-28 22:24:02] [config] transformer-dim-aan: 2048
|
174 |
+
[2021-03-28 22:24:02] [config] transformer-dim-ffn: 2048
|
175 |
+
[2021-03-28 22:24:02] [config] transformer-dropout: 0.1
|
176 |
+
[2021-03-28 22:24:02] [config] transformer-dropout-attention: 0
|
177 |
+
[2021-03-28 22:24:02] [config] transformer-dropout-ffn: 0
|
178 |
+
[2021-03-28 22:24:02] [config] transformer-ffn-activation: swish
|
179 |
+
[2021-03-28 22:24:02] [config] transformer-ffn-depth: 2
|
180 |
+
[2021-03-28 22:24:02] [config] transformer-guided-alignment-layer: last
|
181 |
+
[2021-03-28 22:24:02] [config] transformer-heads: 8
|
182 |
+
[2021-03-28 22:24:02] [config] transformer-no-projection: false
|
183 |
+
[2021-03-28 22:24:02] [config] transformer-pool: false
|
184 |
+
[2021-03-28 22:24:02] [config] transformer-postprocess: dan
|
185 |
+
[2021-03-28 22:24:02] [config] transformer-postprocess-emb: d
|
186 |
+
[2021-03-28 22:24:02] [config] transformer-postprocess-top: ""
|
187 |
+
[2021-03-28 22:24:02] [config] transformer-preprocess: ""
|
188 |
+
[2021-03-28 22:24:02] [config] transformer-tied-layers:
|
189 |
+
[2021-03-28 22:24:02] [config] []
|
190 |
+
[2021-03-28 22:24:02] [config] transformer-train-position-embeddings: false
|
191 |
+
[2021-03-28 22:24:02] [config] tsv: false
|
192 |
+
[2021-03-28 22:24:02] [config] tsv-fields: 0
|
193 |
+
[2021-03-28 22:24:02] [config] type: transformer
|
194 |
+
[2021-03-28 22:24:02] [config] ulr: false
|
195 |
+
[2021-03-28 22:24:02] [config] ulr-dim-emb: 0
|
196 |
+
[2021-03-28 22:24:02] [config] ulr-dropout: 0
|
197 |
+
[2021-03-28 22:24:02] [config] ulr-keys-vectors: ""
|
198 |
+
[2021-03-28 22:24:02] [config] ulr-query-vectors: ""
|
199 |
+
[2021-03-28 22:24:02] [config] ulr-softmax-temperature: 1
|
200 |
+
[2021-03-28 22:24:02] [config] ulr-trainable-transformation: false
|
201 |
+
[2021-03-28 22:24:02] [config] unlikelihood-loss: false
|
202 |
+
[2021-03-28 22:24:02] [config] valid-freq: 10000
|
203 |
+
[2021-03-28 22:24:02] [config] valid-log: /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.valid1.log
|
204 |
+
[2021-03-28 22:24:02] [config] valid-max-length: 1000
|
205 |
+
[2021-03-28 22:24:02] [config] valid-metrics:
|
206 |
+
[2021-03-28 22:24:02] [config] - perplexity
|
207 |
+
[2021-03-28 22:24:02] [config] valid-mini-batch: 16
|
208 |
+
[2021-03-28 22:24:02] [config] valid-reset-stalled: false
|
209 |
+
[2021-03-28 22:24:02] [config] valid-script-args:
|
210 |
+
[2021-03-28 22:24:02] [config] []
|
211 |
+
[2021-03-28 22:24:02] [config] valid-script-path: ""
|
212 |
+
[2021-03-28 22:24:02] [config] valid-sets:
|
213 |
+
[2021-03-28 22:24:02] [config] - /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/val/Tatoeba-dev.src.spm32k
|
214 |
+
[2021-03-28 22:24:02] [config] - /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/val/Tatoeba-dev.trg.spm32k
|
215 |
+
[2021-03-28 22:24:02] [config] valid-translation-output: ""
|
216 |
+
[2021-03-28 22:24:02] [config] vocabs:
|
217 |
+
[2021-03-28 22:24:02] [config] - /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.vocab.yml
|
218 |
+
[2021-03-28 22:24:02] [config] - /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.vocab.yml
|
219 |
+
[2021-03-28 22:24:02] [config] word-penalty: 0
|
220 |
+
[2021-03-28 22:24:02] [config] word-scores: false
|
221 |
+
[2021-03-28 22:24:02] [config] workspace: 24000
|
222 |
+
[2021-03-28 22:24:02] [config] Model is being created with Marian v1.10.0 6f6d484 2021-02-06 15:35:16 -0800
|
223 |
+
[2021-03-28 22:24:02] Using synchronous SGD
|
224 |
+
[2021-03-28 22:24:02] [data] Loading vocabulary from JSON/Yaml file /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.vocab.yml
|
225 |
+
[2021-03-28 22:24:03] [data] Setting vocabulary size for input 0 to 60,878
|
226 |
+
[2021-03-28 22:24:03] [data] Loading vocabulary from JSON/Yaml file /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.vocab.yml
|
227 |
+
[2021-03-28 22:24:03] [data] Setting vocabulary size for input 1 to 60,878
|
228 |
+
[2021-03-28 22:24:03] [data] Using word alignments from file /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/train/opus+bt.spm32k-spm32k.src-trg.alg.gz
|
229 |
+
[2021-03-28 22:24:03] [sqlite] Creating temporary database in /run/nvme/job_5338591/data
|
230 |
+
[2021-03-28 22:24:07] [sqlite] Inserted 1000000 lines
|
231 |
+
[2021-03-28 22:24:11] [sqlite] Inserted 2000000 lines
|
232 |
+
[2021-03-28 22:24:19] [sqlite] Inserted 4000000 lines
|
233 |
+
[2021-03-28 22:24:35] [sqlite] Inserted 8000000 lines
|
234 |
+
[2021-03-28 22:25:07] [sqlite] Inserted 16000000 lines
|
235 |
+
[2021-03-28 22:26:09] [sqlite] Inserted 32000000 lines
|
236 |
+
[2021-03-28 22:27:40] [sqlite] Inserted 64000000 lines
|
237 |
+
[2021-03-28 22:29:22] [sqlite] Inserted 90910822 lines
|
238 |
+
[2021-03-28 22:29:22] [sqlite] Creating primary index
|
239 |
+
[2021-03-28 22:30:09] [comm] Compiled without MPI support. Running as a single process on r18g02.bullx
|
240 |
+
[2021-03-28 22:30:09] [batching] Collecting statistics for batch fitting with step size 10
|
241 |
+
[2021-03-28 22:30:18] [memory] Extending reserved space to 24064 MB (device gpu0)
|
242 |
+
[2021-03-28 22:30:19] [memory] Extending reserved space to 24064 MB (device gpu1)
|
243 |
+
[2021-03-28 22:30:19] [memory] Extending reserved space to 24064 MB (device gpu2)
|
244 |
+
[2021-03-28 22:30:20] [memory] Extending reserved space to 24064 MB (device gpu3)
|
245 |
+
[2021-03-28 22:30:20] [comm] Using NCCL 2.8.3 for GPU communication
|
246 |
+
[2021-03-28 22:30:21] [comm] NCCLCommunicator constructed successfully
|
247 |
+
[2021-03-28 22:30:21] [training] Using 4 GPUs
|
248 |
+
[2021-03-28 22:30:22] [logits] Applying loss function for 1 factor(s)
|
249 |
+
[2021-03-28 22:30:22] [memory] Reserving 287 MB, device gpu0
|
250 |
+
[2021-03-28 22:30:22] [gpu] 16-bit TensorCores enabled for float32 matrix operations
|
251 |
+
[2021-03-28 22:30:23] [memory] Reserving 287 MB, device gpu0
|
252 |
+
[2021-03-28 22:33:08] [batching] Done. Typical MB size is 57,739 target words
|
253 |
+
[2021-03-28 22:33:09] [memory] Extending reserved space to 24064 MB (device gpu0)
|
254 |
+
[2021-03-28 22:33:09] [memory] Extending reserved space to 24064 MB (device gpu1)
|
255 |
+
[2021-03-28 22:33:09] [memory] Extending reserved space to 24064 MB (device gpu2)
|
256 |
+
[2021-03-28 22:33:09] [memory] Extending reserved space to 24064 MB (device gpu3)
|
257 |
+
[2021-03-28 22:33:09] [comm] Using NCCL 2.8.3 for GPU communication
|
258 |
+
[2021-03-28 22:33:10] [comm] NCCLCommunicator constructed successfully
|
259 |
+
[2021-03-28 22:33:10] [training] Using 4 GPUs
|
260 |
+
[2021-03-28 22:33:10] Training started
|
261 |
+
[2021-03-28 22:33:10] [sqlite] Selecting shuffled data
|
262 |
+
[2021-03-28 22:35:02] [training] Batches are processed as 1 process(es) x 4 devices/process
|
263 |
+
[2021-03-28 22:35:02] [memory] Reserving 287 MB, device gpu0
|
264 |
+
[2021-03-28 22:35:02] [memory] Reserving 287 MB, device gpu1
|
265 |
+
[2021-03-28 22:35:02] [memory] Reserving 287 MB, device gpu2
|
266 |
+
[2021-03-28 22:35:02] [memory] Reserving 287 MB, device gpu3
|
267 |
+
[2021-03-28 22:35:02] [memory] Reserving 287 MB, device gpu0
|
268 |
+
[2021-03-28 22:35:02] [memory] Reserving 287 MB, device gpu2
|
269 |
+
[2021-03-28 22:35:02] [memory] Reserving 287 MB, device gpu1
|
270 |
+
[2021-03-28 22:35:02] [memory] Reserving 287 MB, device gpu3
|
271 |
+
[2021-03-28 22:35:02] [memory] Reserving 71 MB, device gpu0
|
272 |
+
[2021-03-28 22:35:02] [memory] Reserving 71 MB, device gpu1
|
273 |
+
[2021-03-28 22:35:02] [memory] Reserving 71 MB, device gpu2
|
274 |
+
[2021-03-28 22:35:02] [memory] Reserving 71 MB, device gpu3
|
275 |
+
[2021-03-28 22:35:02] [memory] Reserving 143 MB, device gpu0
|
276 |
+
[2021-03-28 22:35:02] [memory] Reserving 143 MB, device gpu3
|
277 |
+
[2021-03-28 22:35:02] [memory] Reserving 143 MB, device gpu2
|
278 |
+
[2021-03-28 22:35:02] [memory] Reserving 143 MB, device gpu1
|
279 |
+
[2021-03-29 00:40:05] Ep. 1 : Up. 10000 : Sen. 7,937,432 : Cost 0.76499343 * 1,539,648,423 @ 19,932 after 1,539,648,423 : Time 7616.30s : 25243.16 words/s : L.r. 1.8750e-04
|
280 |
+
[2021-03-29 00:40:05] Saving model weights and runtime parameters to /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
281 |
+
[2021-03-29 00:40:07] Saving model weights and runtime parameters to /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
282 |
+
[2021-03-29 00:40:09] Saving Adam parameters to /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
283 |
+
[2021-03-29 00:40:16] Saving model weights and runtime parameters to /scratch/project_2001194/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
284 |
+
[2021-03-29 00:40:17] [valid] Ep. 1 : Up. 10000 : perplexity : 6.61571 : new best
|
285 |
+
[2021-04-02 20:06:19] [marian] Marian v1.10.0 6f6d484 2021-02-06 15:35:16 -0800
|
286 |
+
[2021-04-02 20:06:19] [marian] Running on r03g02.bullx as process 131947 with command line:
|
287 |
+
[2021-04-02 20:06:19] [marian] /projappl/project_2001194/marian/build/marian --guided-alignment /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/train/opus+bt.spm32k-spm32k.src-trg.alg.gz --early-stopping 15 --valid-freq 10000 --valid-sets /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/val/Tatoeba-dev.src.spm32k /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/val/Tatoeba-dev.trg.spm32k --valid-metrics perplexity --valid-mini-batch 16 --valid-log /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.valid1.log --beam-size 12 --normalize 1 --allow-unk --overwrite --keep-best --model /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz --type transformer --train-sets /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/train/opus+bt.src.clean.spm32k.gz /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/train/opus+bt.trg.clean.spm32k.gz --max-length 500 --vocabs /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.vocab.yml /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.vocab.yml --mini-batch-fit -w 24000 --maxi-batch 500 --save-freq 10000 --disp-freq 10000 --log /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.train1.log --enc-depth 6 --dec-depth 6 --transformer-heads 8 --transformer-postprocess-emb d --transformer-postprocess dan --transformer-dropout 0.1 --label-smoothing 0.1 --learn-rate 0.0003 --lr-warmup 16000 --lr-decay-inv-sqrt 16000 --lr-report --optimizer-params 0.9 0.98 1e-09 --clip-norm 5 --fp16 --tied-embeddings-all --devices 0 1 2 3 --sync-sgd --seed 1111 --sqlite --tempdir /run/nvme/job_5393846/data --exponential-smoothing
|
288 |
+
[2021-04-02 20:06:20] [config] after: 0e
|
289 |
+
[2021-04-02 20:06:20] [config] after-batches: 0
|
290 |
+
[2021-04-02 20:06:20] [config] after-epochs: 0
|
291 |
+
[2021-04-02 20:06:20] [config] all-caps-every: 0
|
292 |
+
[2021-04-02 20:06:20] [config] allow-unk: true
|
293 |
+
[2021-04-02 20:06:20] [config] authors: false
|
294 |
+
[2021-04-02 20:06:20] [config] beam-size: 12
|
295 |
+
[2021-04-02 20:06:20] [config] bert-class-symbol: "[CLS]"
|
296 |
+
[2021-04-02 20:06:20] [config] bert-mask-symbol: "[MASK]"
|
297 |
+
[2021-04-02 20:06:20] [config] bert-masking-fraction: 0.15
|
298 |
+
[2021-04-02 20:06:20] [config] bert-sep-symbol: "[SEP]"
|
299 |
+
[2021-04-02 20:06:20] [config] bert-train-type-embeddings: true
|
300 |
+
[2021-04-02 20:06:20] [config] bert-type-vocab-size: 2
|
301 |
+
[2021-04-02 20:06:20] [config] build-info: ""
|
302 |
+
[2021-04-02 20:06:20] [config] cite: false
|
303 |
+
[2021-04-02 20:06:20] [config] clip-norm: 5
|
304 |
+
[2021-04-02 20:06:20] [config] cost-scaling:
|
305 |
+
[2021-04-02 20:06:20] [config] - 7
|
306 |
+
[2021-04-02 20:06:20] [config] - 2000
|
307 |
+
[2021-04-02 20:06:20] [config] - 2
|
308 |
+
[2021-04-02 20:06:20] [config] - 0.05
|
309 |
+
[2021-04-02 20:06:20] [config] - 10
|
310 |
+
[2021-04-02 20:06:20] [config] - 1
|
311 |
+
[2021-04-02 20:06:20] [config] cost-type: ce-sum
|
312 |
+
[2021-04-02 20:06:20] [config] cpu-threads: 0
|
313 |
+
[2021-04-02 20:06:20] [config] data-weighting: ""
|
314 |
+
[2021-04-02 20:06:20] [config] data-weighting-type: sentence
|
315 |
+
[2021-04-02 20:06:20] [config] dec-cell: gru
|
316 |
+
[2021-04-02 20:06:20] [config] dec-cell-base-depth: 2
|
317 |
+
[2021-04-02 20:06:20] [config] dec-cell-high-depth: 1
|
318 |
+
[2021-04-02 20:06:20] [config] dec-depth: 6
|
319 |
+
[2021-04-02 20:06:20] [config] devices:
|
320 |
+
[2021-04-02 20:06:20] [config] - 0
|
321 |
+
[2021-04-02 20:06:20] [config] - 1
|
322 |
+
[2021-04-02 20:06:20] [config] - 2
|
323 |
+
[2021-04-02 20:06:20] [config] - 3
|
324 |
+
[2021-04-02 20:06:20] [config] dim-emb: 512
|
325 |
+
[2021-04-02 20:06:20] [config] dim-rnn: 1024
|
326 |
+
[2021-04-02 20:06:20] [config] dim-vocabs:
|
327 |
+
[2021-04-02 20:06:20] [config] - 60878
|
328 |
+
[2021-04-02 20:06:20] [config] - 60878
|
329 |
+
[2021-04-02 20:06:20] [config] disp-first: 0
|
330 |
+
[2021-04-02 20:06:20] [config] disp-freq: 10000
|
331 |
+
[2021-04-02 20:06:20] [config] disp-label-counts: true
|
332 |
+
[2021-04-02 20:06:20] [config] dropout-rnn: 0
|
333 |
+
[2021-04-02 20:06:20] [config] dropout-src: 0
|
334 |
+
[2021-04-02 20:06:20] [config] dropout-trg: 0
|
335 |
+
[2021-04-02 20:06:20] [config] dump-config: ""
|
336 |
+
[2021-04-02 20:06:20] [config] early-stopping: 15
|
337 |
+
[2021-04-02 20:06:20] [config] embedding-fix-src: false
|
338 |
+
[2021-04-02 20:06:20] [config] embedding-fix-trg: false
|
339 |
+
[2021-04-02 20:06:20] [config] embedding-normalization: false
|
340 |
+
[2021-04-02 20:06:20] [config] embedding-vectors:
|
341 |
+
[2021-04-02 20:06:20] [config] []
|
342 |
+
[2021-04-02 20:06:20] [config] enc-cell: gru
|
343 |
+
[2021-04-02 20:06:20] [config] enc-cell-depth: 1
|
344 |
+
[2021-04-02 20:06:20] [config] enc-depth: 6
|
345 |
+
[2021-04-02 20:06:20] [config] enc-type: bidirectional
|
346 |
+
[2021-04-02 20:06:20] [config] english-title-case-every: 0
|
347 |
+
[2021-04-02 20:06:20] [config] exponential-smoothing: 0.0001
|
348 |
+
[2021-04-02 20:06:20] [config] factor-weight: 1
|
349 |
+
[2021-04-02 20:06:20] [config] grad-dropping-momentum: 0
|
350 |
+
[2021-04-02 20:06:20] [config] grad-dropping-rate: 0
|
351 |
+
[2021-04-02 20:06:20] [config] grad-dropping-warmup: 100
|
352 |
+
[2021-04-02 20:06:20] [config] gradient-checkpointing: false
|
353 |
+
[2021-04-02 20:06:20] [config] guided-alignment: /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/train/opus+bt.spm32k-spm32k.src-trg.alg.gz
|
354 |
+
[2021-04-02 20:06:20] [config] guided-alignment-cost: mse
|
355 |
+
[2021-04-02 20:06:20] [config] guided-alignment-weight: 0.1
|
356 |
+
[2021-04-02 20:06:20] [config] ignore-model-config: false
|
357 |
+
[2021-04-02 20:06:20] [config] input-types:
|
358 |
+
[2021-04-02 20:06:20] [config] []
|
359 |
+
[2021-04-02 20:06:20] [config] interpolate-env-vars: false
|
360 |
+
[2021-04-02 20:06:20] [config] keep-best: true
|
361 |
+
[2021-04-02 20:06:20] [config] label-smoothing: 0.1
|
362 |
+
[2021-04-02 20:06:20] [config] layer-normalization: false
|
363 |
+
[2021-04-02 20:06:20] [config] learn-rate: 0.0003
|
364 |
+
[2021-04-02 20:06:20] [config] lemma-dim-emb: 0
|
365 |
+
[2021-04-02 20:06:20] [config] log: /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.train1.log
|
366 |
+
[2021-04-02 20:06:20] [config] log-level: info
|
367 |
+
[2021-04-02 20:06:20] [config] log-time-zone: ""
|
368 |
+
[2021-04-02 20:06:20] [config] logical-epoch:
|
369 |
+
[2021-04-02 20:06:20] [config] - 1e
|
370 |
+
[2021-04-02 20:06:20] [config] - 0
|
371 |
+
[2021-04-02 20:06:20] [config] lr-decay: 0
|
372 |
+
[2021-04-02 20:06:20] [config] lr-decay-freq: 50000
|
373 |
+
[2021-04-02 20:06:20] [config] lr-decay-inv-sqrt:
|
374 |
+
[2021-04-02 20:06:20] [config] - 16000
|
375 |
+
[2021-04-02 20:06:20] [config] lr-decay-repeat-warmup: false
|
376 |
+
[2021-04-02 20:06:20] [config] lr-decay-reset-optimizer: false
|
377 |
+
[2021-04-02 20:06:20] [config] lr-decay-start:
|
378 |
+
[2021-04-02 20:06:20] [config] - 10
|
379 |
+
[2021-04-02 20:06:20] [config] - 1
|
380 |
+
[2021-04-02 20:06:20] [config] lr-decay-strategy: epoch+stalled
|
381 |
+
[2021-04-02 20:06:20] [config] lr-report: true
|
382 |
+
[2021-04-02 20:06:20] [config] lr-warmup: 16000
|
383 |
+
[2021-04-02 20:06:20] [config] lr-warmup-at-reload: false
|
384 |
+
[2021-04-02 20:06:20] [config] lr-warmup-cycle: false
|
385 |
+
[2021-04-02 20:06:20] [config] lr-warmup-start-rate: 0
|
386 |
+
[2021-04-02 20:06:20] [config] max-length: 500
|
387 |
+
[2021-04-02 20:06:20] [config] max-length-crop: false
|
388 |
+
[2021-04-02 20:06:20] [config] max-length-factor: 3
|
389 |
+
[2021-04-02 20:06:20] [config] maxi-batch: 500
|
390 |
+
[2021-04-02 20:06:20] [config] maxi-batch-sort: trg
|
391 |
+
[2021-04-02 20:06:20] [config] mini-batch: 64
|
392 |
+
[2021-04-02 20:06:20] [config] mini-batch-fit: true
|
393 |
+
[2021-04-02 20:06:20] [config] mini-batch-fit-step: 10
|
394 |
+
[2021-04-02 20:06:20] [config] mini-batch-track-lr: false
|
395 |
+
[2021-04-02 20:06:20] [config] mini-batch-warmup: 0
|
396 |
+
[2021-04-02 20:06:20] [config] mini-batch-words: 0
|
397 |
+
[2021-04-02 20:06:20] [config] mini-batch-words-ref: 0
|
398 |
+
[2021-04-02 20:06:20] [config] model: /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
399 |
+
[2021-04-02 20:06:20] [config] multi-loss-type: sum
|
400 |
+
[2021-04-02 20:06:20] [config] multi-node: false
|
401 |
+
[2021-04-02 20:06:20] [config] multi-node-overlap: true
|
402 |
+
[2021-04-02 20:06:20] [config] n-best: false
|
403 |
+
[2021-04-02 20:06:20] [config] no-nccl: false
|
404 |
+
[2021-04-02 20:06:20] [config] no-reload: false
|
405 |
+
[2021-04-02 20:06:20] [config] no-restore-corpus: false
|
406 |
+
[2021-04-02 20:06:20] [config] normalize: 1
|
407 |
+
[2021-04-02 20:06:20] [config] normalize-gradient: false
|
408 |
+
[2021-04-02 20:06:20] [config] num-devices: 0
|
409 |
+
[2021-04-02 20:06:20] [config] optimizer: adam
|
410 |
+
[2021-04-02 20:06:20] [config] optimizer-delay: 1
|
411 |
+
[2021-04-02 20:06:20] [config] optimizer-params:
|
412 |
+
[2021-04-02 20:06:20] [config] - 0.9
|
413 |
+
[2021-04-02 20:06:20] [config] - 0.98
|
414 |
+
[2021-04-02 20:06:20] [config] - 1e-09
|
415 |
+
[2021-04-02 20:06:20] [config] output-omit-bias: false
|
416 |
+
[2021-04-02 20:06:20] [config] overwrite: true
|
417 |
+
[2021-04-02 20:06:20] [config] precision:
|
418 |
+
[2021-04-02 20:06:20] [config] - float16
|
419 |
+
[2021-04-02 20:06:20] [config] - float32
|
420 |
+
[2021-04-02 20:06:20] [config] - float32
|
421 |
+
[2021-04-02 20:06:20] [config] pretrained-model: ""
|
422 |
+
[2021-04-02 20:06:20] [config] quantize-biases: false
|
423 |
+
[2021-04-02 20:06:20] [config] quantize-bits: 0
|
424 |
+
[2021-04-02 20:06:20] [config] quantize-log-based: false
|
425 |
+
[2021-04-02 20:06:20] [config] quantize-optimization-steps: 0
|
426 |
+
[2021-04-02 20:06:20] [config] quiet: false
|
427 |
+
[2021-04-02 20:06:20] [config] quiet-translation: false
|
428 |
+
[2021-04-02 20:06:20] [config] relative-paths: false
|
429 |
+
[2021-04-02 20:06:20] [config] right-left: false
|
430 |
+
[2021-04-02 20:06:20] [config] save-freq: 10000
|
431 |
+
[2021-04-02 20:06:20] [config] seed: 1111
|
432 |
+
[2021-04-02 20:06:20] [config] sentencepiece-alphas:
|
433 |
+
[2021-04-02 20:06:20] [config] []
|
434 |
+
[2021-04-02 20:06:20] [config] sentencepiece-max-lines: 2000000
|
435 |
+
[2021-04-02 20:06:20] [config] sentencepiece-options: ""
|
436 |
+
[2021-04-02 20:06:20] [config] shuffle: data
|
437 |
+
[2021-04-02 20:06:20] [config] shuffle-in-ram: false
|
438 |
+
[2021-04-02 20:06:20] [config] sigterm: save-and-exit
|
439 |
+
[2021-04-02 20:06:20] [config] skip: false
|
440 |
+
[2021-04-02 20:06:20] [config] sqlite: temporary
|
441 |
+
[2021-04-02 20:06:20] [config] sqlite-drop: false
|
442 |
+
[2021-04-02 20:06:20] [config] sync-sgd: true
|
443 |
+
[2021-04-02 20:06:20] [config] tempdir: /run/nvme/job_5393846/data
|
444 |
+
[2021-04-02 20:06:20] [config] tied-embeddings: false
|
445 |
+
[2021-04-02 20:06:20] [config] tied-embeddings-all: true
|
446 |
+
[2021-04-02 20:06:20] [config] tied-embeddings-src: false
|
447 |
+
[2021-04-02 20:06:20] [config] train-embedder-rank:
|
448 |
+
[2021-04-02 20:06:20] [config] []
|
449 |
+
[2021-04-02 20:06:20] [config] train-sets:
|
450 |
+
[2021-04-02 20:06:20] [config] - /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/train/opus+bt.src.clean.spm32k.gz
|
451 |
+
[2021-04-02 20:06:20] [config] - /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/train/opus+bt.trg.clean.spm32k.gz
|
452 |
+
[2021-04-02 20:06:20] [config] transformer-aan-activation: swish
|
453 |
+
[2021-04-02 20:06:20] [config] transformer-aan-depth: 2
|
454 |
+
[2021-04-02 20:06:20] [config] transformer-aan-nogate: false
|
455 |
+
[2021-04-02 20:06:20] [config] transformer-decoder-autoreg: self-attention
|
456 |
+
[2021-04-02 20:06:20] [config] transformer-depth-scaling: false
|
457 |
+
[2021-04-02 20:06:20] [config] transformer-dim-aan: 2048
|
458 |
+
[2021-04-02 20:06:20] [config] transformer-dim-ffn: 2048
|
459 |
+
[2021-04-02 20:06:20] [config] transformer-dropout: 0.1
|
460 |
+
[2021-04-02 20:06:20] [config] transformer-dropout-attention: 0
|
461 |
+
[2021-04-02 20:06:20] [config] transformer-dropout-ffn: 0
|
462 |
+
[2021-04-02 20:06:20] [config] transformer-ffn-activation: swish
|
463 |
+
[2021-04-02 20:06:20] [config] transformer-ffn-depth: 2
|
464 |
+
[2021-04-02 20:06:20] [config] transformer-guided-alignment-layer: last
|
465 |
+
[2021-04-02 20:06:20] [config] transformer-heads: 8
|
466 |
+
[2021-04-02 20:06:20] [config] transformer-no-projection: false
|
467 |
+
[2021-04-02 20:06:20] [config] transformer-pool: false
|
468 |
+
[2021-04-02 20:06:20] [config] transformer-postprocess: dan
|
469 |
+
[2021-04-02 20:06:20] [config] transformer-postprocess-emb: d
|
470 |
+
[2021-04-02 20:06:20] [config] transformer-postprocess-top: ""
|
471 |
+
[2021-04-02 20:06:20] [config] transformer-preprocess: ""
|
472 |
+
[2021-04-02 20:06:20] [config] transformer-tied-layers:
|
473 |
+
[2021-04-02 20:06:20] [config] []
|
474 |
+
[2021-04-02 20:06:20] [config] transformer-train-position-embeddings: false
|
475 |
+
[2021-04-02 20:06:20] [config] tsv: false
|
476 |
+
[2021-04-02 20:06:20] [config] tsv-fields: 0
|
477 |
+
[2021-04-02 20:06:20] [config] type: transformer
|
478 |
+
[2021-04-02 20:06:20] [config] ulr: false
|
479 |
+
[2021-04-02 20:06:20] [config] ulr-dim-emb: 0
|
480 |
+
[2021-04-02 20:06:20] [config] ulr-dropout: 0
|
481 |
+
[2021-04-02 20:06:20] [config] ulr-keys-vectors: ""
|
482 |
+
[2021-04-02 20:06:20] [config] ulr-query-vectors: ""
|
483 |
+
[2021-04-02 20:06:20] [config] ulr-softmax-temperature: 1
|
484 |
+
[2021-04-02 20:06:20] [config] ulr-trainable-transformation: false
|
485 |
+
[2021-04-02 20:06:20] [config] unlikelihood-loss: false
|
486 |
+
[2021-04-02 20:06:20] [config] valid-freq: 10000
|
487 |
+
[2021-04-02 20:06:20] [config] valid-log: /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.valid1.log
|
488 |
+
[2021-04-02 20:06:20] [config] valid-max-length: 1000
|
489 |
+
[2021-04-02 20:06:20] [config] valid-metrics:
|
490 |
+
[2021-04-02 20:06:20] [config] - perplexity
|
491 |
+
[2021-04-02 20:06:20] [config] valid-mini-batch: 16
|
492 |
+
[2021-04-02 20:06:20] [config] valid-reset-stalled: false
|
493 |
+
[2021-04-02 20:06:20] [config] valid-script-args:
|
494 |
+
[2021-04-02 20:06:20] [config] []
|
495 |
+
[2021-04-02 20:06:20] [config] valid-script-path: ""
|
496 |
+
[2021-04-02 20:06:20] [config] valid-sets:
|
497 |
+
[2021-04-02 20:06:20] [config] - /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/val/Tatoeba-dev.src.spm32k
|
498 |
+
[2021-04-02 20:06:20] [config] - /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/val/Tatoeba-dev.trg.spm32k
|
499 |
+
[2021-04-02 20:06:20] [config] valid-translation-output: ""
|
500 |
+
[2021-04-02 20:06:20] [config] version: v1.10.0 6f6d484 2021-02-06 15:35:16 -0800
|
501 |
+
[2021-04-02 20:06:20] [config] vocabs:
|
502 |
+
[2021-04-02 20:06:20] [config] - /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.vocab.yml
|
503 |
+
[2021-04-02 20:06:20] [config] - /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.vocab.yml
|
504 |
+
[2021-04-02 20:06:20] [config] word-penalty: 0
|
505 |
+
[2021-04-02 20:06:20] [config] word-scores: false
|
506 |
+
[2021-04-02 20:06:20] [config] workspace: 24000
|
507 |
+
[2021-04-02 20:06:20] [config] Loaded model has been created with Marian v1.10.0 6f6d484 2021-02-06 15:35:16 -0800
|
508 |
+
[2021-04-02 20:06:20] Using synchronous SGD
|
509 |
+
[2021-04-02 20:06:20] [data] Loading vocabulary from JSON/Yaml file /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.vocab.yml
|
510 |
+
[2021-04-02 20:06:20] [data] Setting vocabulary size for input 0 to 60,878
|
511 |
+
[2021-04-02 20:06:20] [data] Loading vocabulary from JSON/Yaml file /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.vocab.yml
|
512 |
+
[2021-04-02 20:06:21] [data] Setting vocabulary size for input 1 to 60,878
|
513 |
+
[2021-04-02 20:06:21] [data] Using word alignments from file /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/train/opus+bt.spm32k-spm32k.src-trg.alg.gz
|
514 |
+
[2021-04-02 20:06:21] [sqlite] Creating temporary database in /run/nvme/job_5393846/data
|
515 |
+
[2021-04-02 20:06:25] [sqlite] Inserted 1000000 lines
|
516 |
+
[2021-04-02 20:06:29] [sqlite] Inserted 2000000 lines
|
517 |
+
[2021-04-02 20:06:37] [sqlite] Inserted 4000000 lines
|
518 |
+
[2021-04-02 20:06:52] [sqlite] Inserted 8000000 lines
|
519 |
+
[2021-04-02 20:07:24] [sqlite] Inserted 16000000 lines
|
520 |
+
[2021-04-02 20:08:26] [sqlite] Inserted 32000000 lines
|
521 |
+
[2021-04-02 20:09:57] [sqlite] Inserted 64000000 lines
|
522 |
+
[2021-04-02 20:11:39] [sqlite] Inserted 90910822 lines
|
523 |
+
[2021-04-02 20:11:39] [sqlite] Creating primary index
|
524 |
+
[2021-04-02 20:12:26] [comm] Compiled without MPI support. Running as a single process on r03g02.bullx
|
525 |
+
[2021-04-02 20:12:26] [batching] Collecting statistics for batch fitting with step size 10
|
526 |
+
[2021-04-02 20:12:37] [memory] Extending reserved space to 24064 MB (device gpu0)
|
527 |
+
[2021-04-02 20:12:38] [memory] Extending reserved space to 24064 MB (device gpu1)
|
528 |
+
[2021-04-02 20:12:39] [memory] Extending reserved space to 24064 MB (device gpu2)
|
529 |
+
[2021-04-02 20:12:39] [memory] Extending reserved space to 24064 MB (device gpu3)
|
530 |
+
[2021-04-02 20:12:39] [comm] Using NCCL 2.8.3 for GPU communication
|
531 |
+
[2021-04-02 20:12:41] [comm] NCCLCommunicator constructed successfully
|
532 |
+
[2021-04-02 20:12:41] [training] Using 4 GPUs
|
533 |
+
[2021-04-02 20:12:41] [logits] Applying loss function for 1 factor(s)
|
534 |
+
[2021-04-02 20:12:41] [memory] Reserving 287 MB, device gpu0
|
535 |
+
[2021-04-02 20:12:42] [gpu] 16-bit TensorCores enabled for float32 matrix operations
|
536 |
+
[2021-04-02 20:12:43] [memory] Reserving 287 MB, device gpu0
|
537 |
+
[2021-04-02 20:15:28] [batching] Done. Typical MB size is 57,739 target words
|
538 |
+
[2021-04-02 20:15:29] [memory] Extending reserved space to 24064 MB (device gpu0)
|
539 |
+
[2021-04-02 20:15:29] [memory] Extending reserved space to 24064 MB (device gpu1)
|
540 |
+
[2021-04-02 20:15:29] [memory] Extending reserved space to 24064 MB (device gpu2)
|
541 |
+
[2021-04-02 20:15:30] [memory] Extending reserved space to 24064 MB (device gpu3)
|
542 |
+
[2021-04-02 20:15:30] [comm] Using NCCL 2.8.3 for GPU communication
|
543 |
+
[2021-04-02 20:15:31] [comm] NCCLCommunicator constructed successfully
|
544 |
+
[2021-04-02 20:15:31] [training] Using 4 GPUs
|
545 |
+
[2021-04-02 20:15:31] Loading model from /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
546 |
+
[2021-04-02 20:15:32] Loading model from /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
547 |
+
[2021-04-02 20:15:32] Loading model from /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
548 |
+
[2021-04-02 20:15:33] Loading model from /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
549 |
+
[2021-04-02 20:15:33] Loading Adam parameters from /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
550 |
+
[2021-04-02 20:15:35] [memory] Reserving 143 MB, device gpu0
|
551 |
+
[2021-04-02 20:15:35] [memory] Reserving 143 MB, device gpu1
|
552 |
+
[2021-04-02 20:15:35] [memory] Reserving 143 MB, device gpu2
|
553 |
+
[2021-04-02 20:15:35] [memory] Reserving 143 MB, device gpu3
|
554 |
+
[2021-04-02 20:15:35] [training] Model reloaded from /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
555 |
+
[2021-04-02 20:15:35] [data] Restoring the corpus state to epoch 1, batch 10000
|
556 |
+
[2021-04-02 20:15:35] [sqlite] Selecting shuffled data
|
557 |
+
[2021-04-02 20:20:55] Training started
|
558 |
+
[2021-04-02 20:20:55] [training] Batches are processed as 1 process(es) x 4 devices/process
|
559 |
+
[2021-04-02 20:20:55] [memory] Reserving 287 MB, device gpu0
|
560 |
+
[2021-04-02 20:20:55] [memory] Reserving 287 MB, device gpu1
|
561 |
+
[2021-04-02 20:20:55] [memory] Reserving 287 MB, device gpu3
|
562 |
+
[2021-04-02 20:20:55] [memory] Reserving 287 MB, device gpu2
|
563 |
+
[2021-04-02 20:20:56] [memory] Reserving 287 MB, device gpu0
|
564 |
+
[2021-04-02 20:20:56] [memory] Reserving 287 MB, device gpu1
|
565 |
+
[2021-04-02 20:20:56] [memory] Reserving 287 MB, device gpu3
|
566 |
+
[2021-04-02 20:20:57] [memory] Reserving 287 MB, device gpu2
|
567 |
+
[2021-04-02 20:20:57] Loading model from /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
568 |
+
[2021-04-02 20:20:58] [memory] Reserving 287 MB, device cpu0
|
569 |
+
[2021-04-02 20:20:58] [memory] Reserving 71 MB, device gpu0
|
570 |
+
[2021-04-02 20:20:58] [memory] Reserving 71 MB, device gpu1
|
571 |
+
[2021-04-02 20:20:58] [memory] Reserving 71 MB, device gpu2
|
572 |
+
[2021-04-02 20:20:58] [memory] Reserving 71 MB, device gpu3
|
573 |
+
[2021-04-02 22:25:35] Ep. 1 : Up. 20000 : Sen. 15,852,639 : Cost 0.42225203 * 1,534,689,396 @ 21,129 after 3,074,337,819 : Time 7806.09s : 24575.01 words/s : L.r. 2.6833e-04
|
574 |
+
[2021-04-02 22:25:35] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
575 |
+
[2021-04-02 22:25:38] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
576 |
+
[2021-04-02 22:25:40] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
577 |
+
[2021-04-02 22:25:47] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
578 |
+
[2021-04-02 22:25:48] [valid] Ep. 1 : Up. 20000 : perplexity : 3.20918 : new best
|
579 |
+
[2021-04-10 20:28:59] [marian] Marian v1.10.0 6f6d484 2021-02-06 15:35:16 -0800
|
580 |
+
[2021-04-10 20:28:59] [marian] Running on r13g05.bullx as process 132622 with command line:
|
581 |
+
[2021-04-10 20:28:59] [marian] /projappl/project_2001194/marian/build/marian --guided-alignment /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/train/opus+bt.spm32k-spm32k.src-trg.alg.gz --early-stopping 15 --valid-freq 10000 --valid-sets /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/val/Tatoeba-dev.src.spm32k /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/val/Tatoeba-dev.trg.spm32k --valid-metrics perplexity --valid-mini-batch 16 --valid-log /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.valid1.log --beam-size 12 --normalize 1 --allow-unk --overwrite --keep-best --model /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz --type transformer --train-sets /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/train/opus+bt.src.clean.spm32k.gz /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/train/opus+bt.trg.clean.spm32k.gz --max-length 500 --vocabs /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.vocab.yml /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.vocab.yml --mini-batch-fit -w 24000 --maxi-batch 500 --save-freq 10000 --disp-freq 10000 --log /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.train1.log --enc-depth 6 --dec-depth 6 --transformer-heads 8 --transformer-postprocess-emb d --transformer-postprocess dan --transformer-dropout 0.1 --label-smoothing 0.1 --learn-rate 0.0003 --lr-warmup 16000 --lr-decay-inv-sqrt 16000 --lr-report --optimizer-params 0.9 0.98 1e-09 --clip-norm 5 --fp16 --tied-embeddings-all --devices 0 1 2 3 --sync-sgd --seed 1111 --sqlite --tempdir /run/nvme/job_5481217/data --exponential-smoothing
|
582 |
+
[2021-04-10 20:29:01] [config] after: 0e
|
583 |
+
[2021-04-10 20:29:01] [config] after-batches: 0
|
584 |
+
[2021-04-10 20:29:01] [config] after-epochs: 0
|
585 |
+
[2021-04-10 20:29:01] [config] all-caps-every: 0
|
586 |
+
[2021-04-10 20:29:01] [config] allow-unk: true
|
587 |
+
[2021-04-10 20:29:01] [config] authors: false
|
588 |
+
[2021-04-10 20:29:01] [config] beam-size: 12
|
589 |
+
[2021-04-10 20:29:01] [config] bert-class-symbol: "[CLS]"
|
590 |
+
[2021-04-10 20:29:01] [config] bert-mask-symbol: "[MASK]"
|
591 |
+
[2021-04-10 20:29:01] [config] bert-masking-fraction: 0.15
|
592 |
+
[2021-04-10 20:29:01] [config] bert-sep-symbol: "[SEP]"
|
593 |
+
[2021-04-10 20:29:01] [config] bert-train-type-embeddings: true
|
594 |
+
[2021-04-10 20:29:01] [config] bert-type-vocab-size: 2
|
595 |
+
[2021-04-10 20:29:01] [config] build-info: ""
|
596 |
+
[2021-04-10 20:29:01] [config] cite: false
|
597 |
+
[2021-04-10 20:29:01] [config] clip-norm: 5
|
598 |
+
[2021-04-10 20:29:01] [config] cost-scaling:
|
599 |
+
[2021-04-10 20:29:01] [config] - 7
|
600 |
+
[2021-04-10 20:29:01] [config] - 2000
|
601 |
+
[2021-04-10 20:29:01] [config] - 2
|
602 |
+
[2021-04-10 20:29:01] [config] - 0.05
|
603 |
+
[2021-04-10 20:29:01] [config] - 10
|
604 |
+
[2021-04-10 20:29:01] [config] - 1
|
605 |
+
[2021-04-10 20:29:01] [config] cost-type: ce-sum
|
606 |
+
[2021-04-10 20:29:01] [config] cpu-threads: 0
|
607 |
+
[2021-04-10 20:29:01] [config] data-weighting: ""
|
608 |
+
[2021-04-10 20:29:01] [config] data-weighting-type: sentence
|
609 |
+
[2021-04-10 20:29:01] [config] dec-cell: gru
|
610 |
+
[2021-04-10 20:29:01] [config] dec-cell-base-depth: 2
|
611 |
+
[2021-04-10 20:29:01] [config] dec-cell-high-depth: 1
|
612 |
+
[2021-04-10 20:29:01] [config] dec-depth: 6
|
613 |
+
[2021-04-10 20:29:01] [config] devices:
|
614 |
+
[2021-04-10 20:29:01] [config] - 0
|
615 |
+
[2021-04-10 20:29:01] [config] - 1
|
616 |
+
[2021-04-10 20:29:01] [config] - 2
|
617 |
+
[2021-04-10 20:29:01] [config] - 3
|
618 |
+
[2021-04-10 20:29:01] [config] dim-emb: 512
|
619 |
+
[2021-04-10 20:29:01] [config] dim-rnn: 1024
|
620 |
+
[2021-04-10 20:29:01] [config] dim-vocabs:
|
621 |
+
[2021-04-10 20:29:01] [config] - 60878
|
622 |
+
[2021-04-10 20:29:01] [config] - 60878
|
623 |
+
[2021-04-10 20:29:01] [config] disp-first: 0
|
624 |
+
[2021-04-10 20:29:01] [config] disp-freq: 10000
|
625 |
+
[2021-04-10 20:29:01] [config] disp-label-counts: true
|
626 |
+
[2021-04-10 20:29:01] [config] dropout-rnn: 0
|
627 |
+
[2021-04-10 20:29:01] [config] dropout-src: 0
|
628 |
+
[2021-04-10 20:29:01] [config] dropout-trg: 0
|
629 |
+
[2021-04-10 20:29:01] [config] dump-config: ""
|
630 |
+
[2021-04-10 20:29:01] [config] early-stopping: 15
|
631 |
+
[2021-04-10 20:29:01] [config] embedding-fix-src: false
|
632 |
+
[2021-04-10 20:29:01] [config] embedding-fix-trg: false
|
633 |
+
[2021-04-10 20:29:01] [config] embedding-normalization: false
|
634 |
+
[2021-04-10 20:29:01] [config] embedding-vectors:
|
635 |
+
[2021-04-10 20:29:01] [config] []
|
636 |
+
[2021-04-10 20:29:01] [config] enc-cell: gru
|
637 |
+
[2021-04-10 20:29:01] [config] enc-cell-depth: 1
|
638 |
+
[2021-04-10 20:29:01] [config] enc-depth: 6
|
639 |
+
[2021-04-10 20:29:01] [config] enc-type: bidirectional
|
640 |
+
[2021-04-10 20:29:01] [config] english-title-case-every: 0
|
641 |
+
[2021-04-10 20:29:01] [config] exponential-smoothing: 0.0001
|
642 |
+
[2021-04-10 20:29:01] [config] factor-weight: 1
|
643 |
+
[2021-04-10 20:29:01] [config] grad-dropping-momentum: 0
|
644 |
+
[2021-04-10 20:29:01] [config] grad-dropping-rate: 0
|
645 |
+
[2021-04-10 20:29:01] [config] grad-dropping-warmup: 100
|
646 |
+
[2021-04-10 20:29:01] [config] gradient-checkpointing: false
|
647 |
+
[2021-04-10 20:29:01] [config] guided-alignment: /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/train/opus+bt.spm32k-spm32k.src-trg.alg.gz
|
648 |
+
[2021-04-10 20:29:01] [config] guided-alignment-cost: mse
|
649 |
+
[2021-04-10 20:29:01] [config] guided-alignment-weight: 0.1
|
650 |
+
[2021-04-10 20:29:01] [config] ignore-model-config: false
|
651 |
+
[2021-04-10 20:29:01] [config] input-types:
|
652 |
+
[2021-04-10 20:29:01] [config] []
|
653 |
+
[2021-04-10 20:29:01] [config] interpolate-env-vars: false
|
654 |
+
[2021-04-10 20:29:01] [config] keep-best: true
|
655 |
+
[2021-04-10 20:29:01] [config] label-smoothing: 0.1
|
656 |
+
[2021-04-10 20:29:01] [config] layer-normalization: false
|
657 |
+
[2021-04-10 20:29:01] [config] learn-rate: 0.0003
|
658 |
+
[2021-04-10 20:29:01] [config] lemma-dim-emb: 0
|
659 |
+
[2021-04-10 20:29:01] [config] log: /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.train1.log
|
660 |
+
[2021-04-10 20:29:01] [config] log-level: info
|
661 |
+
[2021-04-10 20:29:01] [config] log-time-zone: ""
|
662 |
+
[2021-04-10 20:29:01] [config] logical-epoch:
|
663 |
+
[2021-04-10 20:29:01] [config] - 1e
|
664 |
+
[2021-04-10 20:29:01] [config] - 0
|
665 |
+
[2021-04-10 20:29:01] [config] lr-decay: 0
|
666 |
+
[2021-04-10 20:29:01] [config] lr-decay-freq: 50000
|
667 |
+
[2021-04-10 20:29:01] [config] lr-decay-inv-sqrt:
|
668 |
+
[2021-04-10 20:29:01] [config] - 16000
|
669 |
+
[2021-04-10 20:29:01] [config] lr-decay-repeat-warmup: false
|
670 |
+
[2021-04-10 20:29:01] [config] lr-decay-reset-optimizer: false
|
671 |
+
[2021-04-10 20:29:01] [config] lr-decay-start:
|
672 |
+
[2021-04-10 20:29:01] [config] - 10
|
673 |
+
[2021-04-10 20:29:01] [config] - 1
|
674 |
+
[2021-04-10 20:29:01] [config] lr-decay-strategy: epoch+stalled
|
675 |
+
[2021-04-10 20:29:01] [config] lr-report: true
|
676 |
+
[2021-04-10 20:29:01] [config] lr-warmup: 16000
|
677 |
+
[2021-04-10 20:29:01] [config] lr-warmup-at-reload: false
|
678 |
+
[2021-04-10 20:29:01] [config] lr-warmup-cycle: false
|
679 |
+
[2021-04-10 20:29:01] [config] lr-warmup-start-rate: 0
|
680 |
+
[2021-04-10 20:29:01] [config] max-length: 500
|
681 |
+
[2021-04-10 20:29:01] [config] max-length-crop: false
|
682 |
+
[2021-04-10 20:29:01] [config] max-length-factor: 3
|
683 |
+
[2021-04-10 20:29:01] [config] maxi-batch: 500
|
684 |
+
[2021-04-10 20:29:01] [config] maxi-batch-sort: trg
|
685 |
+
[2021-04-10 20:29:01] [config] mini-batch: 64
|
686 |
+
[2021-04-10 20:29:01] [config] mini-batch-fit: true
|
687 |
+
[2021-04-10 20:29:01] [config] mini-batch-fit-step: 10
|
688 |
+
[2021-04-10 20:29:01] [config] mini-batch-track-lr: false
|
689 |
+
[2021-04-10 20:29:01] [config] mini-batch-warmup: 0
|
690 |
+
[2021-04-10 20:29:01] [config] mini-batch-words: 0
|
691 |
+
[2021-04-10 20:29:01] [config] mini-batch-words-ref: 0
|
692 |
+
[2021-04-10 20:29:01] [config] model: /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
693 |
+
[2021-04-10 20:29:01] [config] multi-loss-type: sum
|
694 |
+
[2021-04-10 20:29:01] [config] multi-node: false
|
695 |
+
[2021-04-10 20:29:01] [config] multi-node-overlap: true
|
696 |
+
[2021-04-10 20:29:01] [config] n-best: false
|
697 |
+
[2021-04-10 20:29:01] [config] no-nccl: false
|
698 |
+
[2021-04-10 20:29:01] [config] no-reload: false
|
699 |
+
[2021-04-10 20:29:01] [config] no-restore-corpus: false
|
700 |
+
[2021-04-10 20:29:01] [config] normalize: 1
|
701 |
+
[2021-04-10 20:29:01] [config] normalize-gradient: false
|
702 |
+
[2021-04-10 20:29:01] [config] num-devices: 0
|
703 |
+
[2021-04-10 20:29:01] [config] optimizer: adam
|
704 |
+
[2021-04-10 20:29:01] [config] optimizer-delay: 1
|
705 |
+
[2021-04-10 20:29:01] [config] optimizer-params:
|
706 |
+
[2021-04-10 20:29:01] [config] - 0.9
|
707 |
+
[2021-04-10 20:29:01] [config] - 0.98
|
708 |
+
[2021-04-10 20:29:01] [config] - 1e-09
|
709 |
+
[2021-04-10 20:29:01] [config] output-omit-bias: false
|
710 |
+
[2021-04-10 20:29:01] [config] overwrite: true
|
711 |
+
[2021-04-10 20:29:01] [config] precision:
|
712 |
+
[2021-04-10 20:29:01] [config] - float16
|
713 |
+
[2021-04-10 20:29:01] [config] - float32
|
714 |
+
[2021-04-10 20:29:01] [config] - float32
|
715 |
+
[2021-04-10 20:29:01] [config] pretrained-model: ""
|
716 |
+
[2021-04-10 20:29:01] [config] quantize-biases: false
|
717 |
+
[2021-04-10 20:29:01] [config] quantize-bits: 0
|
718 |
+
[2021-04-10 20:29:01] [config] quantize-log-based: false
|
719 |
+
[2021-04-10 20:29:01] [config] quantize-optimization-steps: 0
|
720 |
+
[2021-04-10 20:29:01] [config] quiet: false
|
721 |
+
[2021-04-10 20:29:01] [config] quiet-translation: false
|
722 |
+
[2021-04-10 20:29:01] [config] relative-paths: false
|
723 |
+
[2021-04-10 20:29:01] [config] right-left: false
|
724 |
+
[2021-04-10 20:29:01] [config] save-freq: 10000
|
725 |
+
[2021-04-10 20:29:01] [config] seed: 1111
|
726 |
+
[2021-04-10 20:29:01] [config] sentencepiece-alphas:
|
727 |
+
[2021-04-10 20:29:01] [config] []
|
728 |
+
[2021-04-10 20:29:01] [config] sentencepiece-max-lines: 2000000
|
729 |
+
[2021-04-10 20:29:01] [config] sentencepiece-options: ""
|
730 |
+
[2021-04-10 20:29:01] [config] shuffle: data
|
731 |
+
[2021-04-10 20:29:01] [config] shuffle-in-ram: false
|
732 |
+
[2021-04-10 20:29:01] [config] sigterm: save-and-exit
|
733 |
+
[2021-04-10 20:29:01] [config] skip: false
|
734 |
+
[2021-04-10 20:29:01] [config] sqlite: temporary
|
735 |
+
[2021-04-10 20:29:01] [config] sqlite-drop: false
|
736 |
+
[2021-04-10 20:29:01] [config] sync-sgd: true
|
737 |
+
[2021-04-10 20:29:01] [config] tempdir: /run/nvme/job_5481217/data
|
738 |
+
[2021-04-10 20:29:01] [config] tied-embeddings: false
|
739 |
+
[2021-04-10 20:29:01] [config] tied-embeddings-all: true
|
740 |
+
[2021-04-10 20:29:01] [config] tied-embeddings-src: false
|
741 |
+
[2021-04-10 20:29:01] [config] train-embedder-rank:
|
742 |
+
[2021-04-10 20:29:01] [config] []
|
743 |
+
[2021-04-10 20:29:01] [config] train-sets:
|
744 |
+
[2021-04-10 20:29:01] [config] - /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/train/opus+bt.src.clean.spm32k.gz
|
745 |
+
[2021-04-10 20:29:01] [config] - /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/train/opus+bt.trg.clean.spm32k.gz
|
746 |
+
[2021-04-10 20:29:01] [config] transformer-aan-activation: swish
|
747 |
+
[2021-04-10 20:29:01] [config] transformer-aan-depth: 2
|
748 |
+
[2021-04-10 20:29:01] [config] transformer-aan-nogate: false
|
749 |
+
[2021-04-10 20:29:01] [config] transformer-decoder-autoreg: self-attention
|
750 |
+
[2021-04-10 20:29:01] [config] transformer-depth-scaling: false
|
751 |
+
[2021-04-10 20:29:01] [config] transformer-dim-aan: 2048
|
752 |
+
[2021-04-10 20:29:01] [config] transformer-dim-ffn: 2048
|
753 |
+
[2021-04-10 20:29:01] [config] transformer-dropout: 0.1
|
754 |
+
[2021-04-10 20:29:01] [config] transformer-dropout-attention: 0
|
755 |
+
[2021-04-10 20:29:01] [config] transformer-dropout-ffn: 0
|
756 |
+
[2021-04-10 20:29:01] [config] transformer-ffn-activation: swish
|
757 |
+
[2021-04-10 20:29:01] [config] transformer-ffn-depth: 2
|
758 |
+
[2021-04-10 20:29:01] [config] transformer-guided-alignment-layer: last
|
759 |
+
[2021-04-10 20:29:01] [config] transformer-heads: 8
|
760 |
+
[2021-04-10 20:29:01] [config] transformer-no-projection: false
|
761 |
+
[2021-04-10 20:29:01] [config] transformer-pool: false
|
762 |
+
[2021-04-10 20:29:01] [config] transformer-postprocess: dan
|
763 |
+
[2021-04-10 20:29:01] [config] transformer-postprocess-emb: d
|
764 |
+
[2021-04-10 20:29:01] [config] transformer-postprocess-top: ""
|
765 |
+
[2021-04-10 20:29:01] [config] transformer-preprocess: ""
|
766 |
+
[2021-04-10 20:29:01] [config] transformer-tied-layers:
|
767 |
+
[2021-04-10 20:29:01] [config] []
|
768 |
+
[2021-04-10 20:29:01] [config] transformer-train-position-embeddings: false
|
769 |
+
[2021-04-10 20:29:01] [config] tsv: false
|
770 |
+
[2021-04-10 20:29:01] [config] tsv-fields: 0
|
771 |
+
[2021-04-10 20:29:01] [config] type: transformer
|
772 |
+
[2021-04-10 20:29:01] [config] ulr: false
|
773 |
+
[2021-04-10 20:29:01] [config] ulr-dim-emb: 0
|
774 |
+
[2021-04-10 20:29:01] [config] ulr-dropout: 0
|
775 |
+
[2021-04-10 20:29:01] [config] ulr-keys-vectors: ""
|
776 |
+
[2021-04-10 20:29:01] [config] ulr-query-vectors: ""
|
777 |
+
[2021-04-10 20:29:01] [config] ulr-softmax-temperature: 1
|
778 |
+
[2021-04-10 20:29:01] [config] ulr-trainable-transformation: false
|
779 |
+
[2021-04-10 20:29:01] [config] unlikelihood-loss: false
|
780 |
+
[2021-04-10 20:29:01] [config] valid-freq: 10000
|
781 |
+
[2021-04-10 20:29:01] [config] valid-log: /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.valid1.log
|
782 |
+
[2021-04-10 20:29:01] [config] valid-max-length: 1000
|
783 |
+
[2021-04-10 20:29:01] [config] valid-metrics:
|
784 |
+
[2021-04-10 20:29:01] [config] - perplexity
|
785 |
+
[2021-04-10 20:29:01] [config] valid-mini-batch: 16
|
786 |
+
[2021-04-10 20:29:01] [config] valid-reset-stalled: false
|
787 |
+
[2021-04-10 20:29:01] [config] valid-script-args:
|
788 |
+
[2021-04-10 20:29:01] [config] []
|
789 |
+
[2021-04-10 20:29:01] [config] valid-script-path: ""
|
790 |
+
[2021-04-10 20:29:01] [config] valid-sets:
|
791 |
+
[2021-04-10 20:29:01] [config] - /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/val/Tatoeba-dev.src.spm32k
|
792 |
+
[2021-04-10 20:29:01] [config] - /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/val/Tatoeba-dev.trg.spm32k
|
793 |
+
[2021-04-10 20:29:01] [config] valid-translation-output: ""
|
794 |
+
[2021-04-10 20:29:01] [config] version: v1.10.0 6f6d484 2021-02-06 15:35:16 -0800
|
795 |
+
[2021-04-10 20:29:01] [config] vocabs:
|
796 |
+
[2021-04-10 20:29:01] [config] - /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.vocab.yml
|
797 |
+
[2021-04-10 20:29:01] [config] - /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.vocab.yml
|
798 |
+
[2021-04-10 20:29:01] [config] word-penalty: 0
|
799 |
+
[2021-04-10 20:29:01] [config] word-scores: false
|
800 |
+
[2021-04-10 20:29:01] [config] workspace: 24000
|
801 |
+
[2021-04-10 20:29:01] [config] Loaded model has been created with Marian v1.10.0 6f6d484 2021-02-06 15:35:16 -0800
|
802 |
+
[2021-04-10 20:29:01] Using synchronous SGD
|
803 |
+
[2021-04-10 20:29:01] [data] Loading vocabulary from JSON/Yaml file /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.vocab.yml
|
804 |
+
[2021-04-10 20:29:02] [data] Setting vocabulary size for input 0 to 60,878
|
805 |
+
[2021-04-10 20:29:02] [data] Loading vocabulary from JSON/Yaml file /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.vocab.yml
|
806 |
+
[2021-04-10 20:29:02] [data] Setting vocabulary size for input 1 to 60,878
|
807 |
+
[2021-04-10 20:29:02] [data] Using word alignments from file /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/train/opus+bt.spm32k-spm32k.src-trg.alg.gz
|
808 |
+
[2021-04-10 20:29:02] [sqlite] Creating temporary database in /run/nvme/job_5481217/data
|
809 |
+
[2021-04-10 20:29:06] [sqlite] Inserted 1000000 lines
|
810 |
+
[2021-04-10 20:29:10] [sqlite] Inserted 2000000 lines
|
811 |
+
[2021-04-10 20:29:18] [sqlite] Inserted 4000000 lines
|
812 |
+
[2021-04-10 20:29:33] [sqlite] Inserted 8000000 lines
|
813 |
+
[2021-04-10 20:30:05] [sqlite] Inserted 16000000 lines
|
814 |
+
[2021-04-10 20:31:08] [sqlite] Inserted 32000000 lines
|
815 |
+
[2021-04-10 20:32:38] [sqlite] Inserted 64000000 lines
|
816 |
+
[2021-04-10 20:34:20] [sqlite] Inserted 90910822 lines
|
817 |
+
[2021-04-10 20:34:20] [sqlite] Creating primary index
|
818 |
+
[2021-04-10 20:35:07] [comm] Compiled without MPI support. Running as a single process on r13g05.bullx
|
819 |
+
[2021-04-10 20:35:07] [batching] Collecting statistics for batch fitting with step size 10
|
820 |
+
[2021-04-10 20:35:20] [memory] Extending reserved space to 24064 MB (device gpu0)
|
821 |
+
[2021-04-10 20:35:20] [memory] Extending reserved space to 24064 MB (device gpu1)
|
822 |
+
[2021-04-10 20:35:21] [memory] Extending reserved space to 24064 MB (device gpu2)
|
823 |
+
[2021-04-10 20:35:21] [memory] Extending reserved space to 24064 MB (device gpu3)
|
824 |
+
[2021-04-10 20:35:21] [comm] Using NCCL 2.8.3 for GPU communication
|
825 |
+
[2021-04-10 20:35:23] [comm] NCCLCommunicator constructed successfully
|
826 |
+
[2021-04-10 20:35:23] [training] Using 4 GPUs
|
827 |
+
[2021-04-10 20:35:23] [logits] Applying loss function for 1 factor(s)
|
828 |
+
[2021-04-10 20:35:23] [memory] Reserving 287 MB, device gpu0
|
829 |
+
[2021-04-10 20:35:23] [gpu] 16-bit TensorCores enabled for float32 matrix operations
|
830 |
+
[2021-04-10 20:35:24] [memory] Reserving 287 MB, device gpu0
|
831 |
+
[2021-04-10 20:38:11] [batching] Done. Typical MB size is 57,739 target words
|
832 |
+
[2021-04-10 20:38:11] [memory] Extending reserved space to 24064 MB (device gpu0)
|
833 |
+
[2021-04-10 20:38:11] [memory] Extending reserved space to 24064 MB (device gpu1)
|
834 |
+
[2021-04-10 20:38:11] [memory] Extending reserved space to 24064 MB (device gpu2)
|
835 |
+
[2021-04-10 20:38:11] [memory] Extending reserved space to 24064 MB (device gpu3)
|
836 |
+
[2021-04-10 20:38:11] [comm] Using NCCL 2.8.3 for GPU communication
|
837 |
+
[2021-04-10 20:38:13] [comm] NCCLCommunicator constructed successfully
|
838 |
+
[2021-04-10 20:38:13] [training] Using 4 GPUs
|
839 |
+
[2021-04-10 20:38:13] Loading model from /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
840 |
+
[2021-04-10 20:38:13] Loading model from /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
841 |
+
[2021-04-10 20:38:14] Loading model from /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
842 |
+
[2021-04-10 20:38:14] Loading model from /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
843 |
+
[2021-04-10 20:38:15] Loading Adam parameters from /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
844 |
+
[2021-04-10 20:38:16] [memory] Reserving 143 MB, device gpu0
|
845 |
+
[2021-04-10 20:38:16] [memory] Reserving 143 MB, device gpu1
|
846 |
+
[2021-04-10 20:38:16] [memory] Reserving 143 MB, device gpu2
|
847 |
+
[2021-04-10 20:38:16] [memory] Reserving 143 MB, device gpu3
|
848 |
+
[2021-04-10 20:38:17] [training] Model reloaded from /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
849 |
+
[2021-04-10 20:38:17] [data] Restoring the corpus state to epoch 1, batch 20000
|
850 |
+
[2021-04-10 20:38:17] [sqlite] Selecting shuffled data
|
851 |
+
[2021-04-10 20:47:06] Training started
|
852 |
+
[2021-04-10 20:47:59] [training] Batches are processed as 1 process(es) x 4 devices/process
|
853 |
+
[2021-04-10 20:47:59] [memory] Reserving 287 MB, device gpu0
|
854 |
+
[2021-04-10 20:47:59] [memory] Reserving 287 MB, device gpu1
|
855 |
+
[2021-04-10 20:47:59] [memory] Reserving 287 MB, device gpu3
|
856 |
+
[2021-04-10 20:47:59] [memory] Reserving 287 MB, device gpu2
|
857 |
+
[2021-04-10 20:47:59] [memory] Reserving 287 MB, device gpu1
|
858 |
+
[2021-04-10 20:47:59] [memory] Reserving 287 MB, device gpu0
|
859 |
+
[2021-04-10 20:47:59] [memory] Reserving 287 MB, device gpu2
|
860 |
+
[2021-04-10 20:47:59] [memory] Reserving 287 MB, device gpu3
|
861 |
+
[2021-04-10 20:47:59] Loading model from /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
862 |
+
[2021-04-10 20:48:01] [memory] Reserving 287 MB, device cpu0
|
863 |
+
[2021-04-10 20:48:01] [memory] Reserving 71 MB, device gpu0
|
864 |
+
[2021-04-10 20:48:01] [memory] Reserving 71 MB, device gpu1
|
865 |
+
[2021-04-10 20:48:01] [memory] Reserving 71 MB, device gpu2
|
866 |
+
[2021-04-10 20:48:01] [memory] Reserving 71 MB, device gpu3
|
867 |
+
[2021-04-10 22:53:55] Ep. 1 : Up. 30000 : Sen. 23,809,627 : Cost 0.37059498 * 1,537,476,548 @ 19,307 after 4,611,814,367 : Time 8144.22s : 23660.41 words/s : L.r. 2.1909e-04
|
868 |
+
[2021-04-10 22:53:55] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
869 |
+
[2021-04-10 22:53:57] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
870 |
+
[2021-04-10 22:53:59] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
871 |
+
[2021-04-10 22:54:06] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
872 |
+
[2021-04-10 22:54:07] [valid] Ep. 1 : Up. 30000 : perplexity : 2.74846 : new best
|
873 |
+
[2021-04-11 01:00:17] Ep. 1 : Up. 40000 : Sen. 31,775,328 : Cost 0.35317293 * 1,542,109,245 @ 10,837 after 6,153,923,612 : Time 7581.80s : 25443.81 words/s : L.r. 1.8974e-04
|
874 |
+
[2021-04-11 01:00:17] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
875 |
+
[2021-04-11 01:00:19] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
876 |
+
[2021-04-11 01:00:21] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
877 |
+
[2021-04-11 01:00:27] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
878 |
+
[2021-04-11 01:00:28] [valid] Ep. 1 : Up. 40000 : perplexity : 2.57709 : new best
|
879 |
+
[2021-04-11 03:06:32] Ep. 1 : Up. 50000 : Sen. 39,739,557 : Cost 0.34560379 * 1,537,112,128 @ 25,070 after 7,691,035,740 : Time 7574.64s : 25455.78 words/s : L.r. 1.6971e-04
|
880 |
+
[2021-04-11 03:06:32] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
881 |
+
[2021-04-11 03:06:33] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
882 |
+
[2021-04-11 03:06:35] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
883 |
+
[2021-04-11 03:06:41] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
884 |
+
[2021-04-11 03:06:42] [valid] Ep. 1 : Up. 50000 : perplexity : 2.48823 : new best
|
885 |
+
[2021-04-11 05:13:04] Ep. 1 : Up. 60000 : Sen. 47,717,024 : Cost 0.33947721 * 1,544,423,387 @ 29,067 after 9,235,459,127 : Time 7591.92s : 25468.52 words/s : L.r. 1.5492e-04
|
886 |
+
[2021-04-11 05:13:04] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
887 |
+
[2021-04-11 05:13:05] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
888 |
+
[2021-04-11 05:13:07] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
889 |
+
[2021-04-11 05:13:13] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
890 |
+
[2021-04-11 05:13:14] [valid] Ep. 1 : Up. 60000 : perplexity : 2.42648 : new best
|
891 |
+
[2021-04-11 07:19:16] Ep. 1 : Up. 70000 : Sen. 55,678,568 : Cost 0.33626568 * 1,536,569,921 @ 14,568 after 10,772,029,048 : Time 7572.11s : 25449.14 words/s : L.r. 1.4343e-04
|
892 |
+
[2021-04-11 07:19:16] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
893 |
+
[2021-04-11 07:19:18] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
894 |
+
[2021-04-11 07:19:19] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
895 |
+
[2021-04-11 07:19:26] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
896 |
+
[2021-04-11 07:19:26] [valid] Ep. 1 : Up. 70000 : perplexity : 2.38283 : new best
|
897 |
+
[2021-04-11 09:25:48] Ep. 1 : Up. 80000 : Sen. 63,653,841 : Cost 0.33299673 * 1,541,968,996 @ 16,641 after 12,313,998,044 : Time 7591.60s : 25446.59 words/s : L.r. 1.3416e-04
|
898 |
+
[2021-04-11 09:25:48] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
899 |
+
[2021-04-11 09:25:50] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
900 |
+
[2021-04-11 09:25:51] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
901 |
+
[2021-04-11 09:25:58] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
902 |
+
[2021-04-11 09:25:59] [valid] Ep. 1 : Up. 80000 : perplexity : 2.35194 : new best
|
903 |
+
[2021-04-11 11:32:04] Ep. 1 : Up. 90000 : Sen. 71,619,033 : Cost 0.33060694 * 1,540,330,654 @ 16,817 after 13,854,328,698 : Time 7575.88s : 25458.47 words/s : L.r. 1.2649e-04
|
904 |
+
[2021-04-11 11:32:04] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
905 |
+
[2021-04-11 11:32:06] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
906 |
+
[2021-04-11 11:32:08] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
907 |
+
[2021-04-11 11:32:15] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
908 |
+
[2021-04-11 11:32:15] [valid] Ep. 1 : Up. 90000 : perplexity : 2.3261 : new best
|
909 |
+
[2021-04-11 13:38:41] Ep. 1 : Up. 100000 : Sen. 79,596,476 : Cost 0.32837385 * 1,544,307,939 @ 14,566 after 15,398,636,637 : Time 7596.82s : 25424.10 words/s : L.r. 1.2000e-04
|
910 |
+
[2021-04-11 13:38:41] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
911 |
+
[2021-04-11 13:38:43] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
912 |
+
[2021-04-11 13:38:45] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
913 |
+
[2021-04-11 13:38:51] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
914 |
+
[2021-04-11 13:38:52] [valid] Ep. 1 : Up. 100000 : perplexity : 2.30587 : new best
|
915 |
+
[2021-04-11 15:45:05] Ep. 1 : Up. 110000 : Sen. 87,552,837 : Cost 0.32707089 * 1,539,439,336 @ 23,740 after 16,938,075,973 : Time 7584.29s : 25408.62 words/s : L.r. 1.1442e-04
|
916 |
+
[2021-04-11 15:45:05] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
917 |
+
[2021-04-11 15:45:08] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
918 |
+
[2021-04-11 15:45:09] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
919 |
+
[2021-04-11 15:45:16] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
920 |
+
[2021-04-11 15:45:17] [valid] Ep. 1 : Up. 110000 : perplexity : 2.28881 : new best
|
921 |
+
[2021-04-11 16:38:26] Seen 90910822 samples
|
922 |
+
[2021-04-11 16:38:26] Starting data epoch 2 in logical epoch 2
|
923 |
+
[2021-04-11 16:38:26] [sqlite] Selecting shuffled data
|
924 |
+
[2021-04-11 17:53:05] Ep. 2 : Up. 120000 : Sen. 4,604,102 : Cost 0.32645020 * 1,535,115,036 @ 21,005 after 18,473,191,009 : Time 7678.92s : 25099.86 words/s : L.r. 1.0954e-04
|
925 |
+
[2021-04-11 17:53:05] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
926 |
+
[2021-04-11 17:53:07] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
927 |
+
[2021-04-11 17:53:09] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
928 |
+
[2021-04-11 17:53:15] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
929 |
+
[2021-04-11 17:53:16] [valid] Ep. 2 : Up. 120000 : perplexity : 2.27459 : new best
|
930 |
+
[2021-04-11 19:59:32] Ep. 2 : Up. 130000 : Sen. 12,580,452 : Cost 0.32457662 * 1,541,876,924 @ 17,454 after 20,015,067,933 : Time 7587.52s : 25462.31 words/s : L.r. 1.0525e-04
|
931 |
+
[2021-04-11 19:59:32] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
932 |
+
[2021-04-11 19:59:34] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
933 |
+
[2021-04-11 19:59:36] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
934 |
+
[2021-04-11 19:59:42] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
935 |
+
[2021-04-11 19:59:43] [valid] Ep. 2 : Up. 130000 : perplexity : 2.2625 : new best
|
936 |
+
[2021-04-11 22:05:59] Ep. 2 : Up. 140000 : Sen. 20,552,521 : Cost 0.32394958 * 1,540,359,478 @ 32,921 after 21,555,427,411 : Time 7586.69s : 25455.14 words/s : L.r. 1.0142e-04
|
937 |
+
[2021-04-11 22:05:59] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
938 |
+
[2021-04-11 22:06:01] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
939 |
+
[2021-04-11 22:06:03] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
940 |
+
[2021-04-11 22:06:09] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
941 |
+
[2021-04-11 22:06:10] [valid] Ep. 2 : Up. 140000 : perplexity : 2.25244 : new best
|
942 |
+
[2021-04-12 00:12:10] Ep. 2 : Up. 150000 : Sen. 28,516,342 : Cost 0.32292783 * 1,538,883,913 @ 15,698 after 23,094,311,324 : Time 7571.13s : 25470.97 words/s : L.r. 9.7980e-05
|
943 |
+
[2021-04-12 00:12:10] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
944 |
+
[2021-04-12 00:12:12] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
945 |
+
[2021-04-12 00:12:14] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
946 |
+
[2021-04-12 00:12:20] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
947 |
+
[2021-04-12 00:12:21] [valid] Ep. 2 : Up. 150000 : perplexity : 2.24363 : new best
|
948 |
+
[2021-04-12 02:18:19] Ep. 2 : Up. 160000 : Sen. 36,472,921 : Cost 0.32204559 * 1,538,444,007 @ 14,634 after 24,632,755,331 : Time 7568.51s : 25454.85 words/s : L.r. 9.4868e-05
|
949 |
+
[2021-04-12 02:18:19] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
950 |
+
[2021-04-12 02:18:21] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
951 |
+
[2021-04-12 02:18:23] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
952 |
+
[2021-04-12 02:18:29] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
953 |
+
[2021-04-12 02:18:30] [valid] Ep. 2 : Up. 160000 : perplexity : 2.23516 : new best
|
954 |
+
[2021-04-12 04:24:41] Ep. 2 : Up. 170000 : Sen. 44,432,707 : Cost 0.32138234 * 1,539,440,066 @ 20,548 after 26,172,195,397 : Time 7582.03s : 25428.92 words/s : L.r. 9.2036e-05
|
955 |
+
[2021-04-12 04:24:41] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
956 |
+
[2021-04-12 04:24:43] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
957 |
+
[2021-04-12 04:24:45] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
958 |
+
[2021-04-12 04:24:51] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
959 |
+
[2021-04-12 04:24:52] [valid] Ep. 2 : Up. 170000 : perplexity : 2.22792 : new best
|
960 |
+
[2021-04-12 06:30:29] Ep. 2 : Up. 180000 : Sen. 52,350,704 : Cost 0.32022747 * 1,534,576,184 @ 19,611 after 27,706,771,581 : Time 7547.89s : 25419.56 words/s : L.r. 8.9443e-05
|
961 |
+
[2021-04-12 06:30:29] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
962 |
+
[2021-04-12 06:30:31] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
963 |
+
[2021-04-12 06:30:33] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
964 |
+
[2021-04-12 06:30:39] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
965 |
+
[2021-04-12 06:30:40] [valid] Ep. 2 : Up. 180000 : perplexity : 2.22167 : new best
|
966 |
+
[2021-04-12 08:36:55] Ep. 2 : Up. 190000 : Sen. 60,324,297 : Cost 0.32058367 * 1,540,880,487 @ 8,037 after 29,247,652,068 : Time 7585.61s : 25468.44 words/s : L.r. 8.7057e-05
|
967 |
+
[2021-04-12 08:36:55] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
968 |
+
[2021-04-12 08:36:57] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
969 |
+
[2021-04-12 08:36:59] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
970 |
+
[2021-04-12 08:37:05] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
971 |
+
[2021-04-12 08:37:06] [valid] Ep. 2 : Up. 190000 : perplexity : 2.2181 : new best
|
972 |
+
[2021-04-12 10:43:15] Ep. 2 : Up. 200000 : Sen. 68,291,176 : Cost 0.31953624 * 1,540,464,828 @ 14,844 after 30,788,116,896 : Time 7580.26s : 25442.26 words/s : L.r. 8.4853e-05
|
973 |
+
[2021-04-12 10:43:16] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
974 |
+
[2021-04-12 10:43:17] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
975 |
+
[2021-04-12 10:43:19] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
976 |
+
[2021-04-12 10:43:25] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
977 |
+
[2021-04-12 10:43:26] [valid] Ep. 2 : Up. 200000 : perplexity : 2.21232 : new best
|
978 |
+
[2021-04-12 12:49:55] Ep. 2 : Up. 210000 : Sen. 76,282,171 : Cost 0.31941819 * 1,543,300,054 @ 17,957 after 32,331,416,950 : Time 7599.36s : 25456.48 words/s : L.r. 8.2808e-05
|
979 |
+
[2021-04-12 12:49:55] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
980 |
+
[2021-04-12 12:49:57] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
981 |
+
[2021-04-12 12:50:00] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
982 |
+
[2021-04-12 12:50:06] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
983 |
+
[2021-04-12 12:50:07] [valid] Ep. 2 : Up. 210000 : perplexity : 2.20852 : new best
|
984 |
+
[2021-04-12 14:56:06] Ep. 2 : Up. 220000 : Sen. 84,244,657 : Cost 0.31943667 * 1,537,478,561 @ 18,972 after 33,868,895,511 : Time 7571.45s : 25486.67 words/s : L.r. 8.0904e-05
|
985 |
+
[2021-04-12 14:56:06] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
986 |
+
[2021-04-12 14:56:08] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
987 |
+
[2021-04-12 14:56:10] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
988 |
+
[2021-04-12 14:56:16] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
989 |
+
[2021-04-12 14:56:17] [valid] Ep. 2 : Up. 220000 : perplexity : 2.2026 : new best
|
990 |
+
[2021-04-12 16:41:55] Seen 90910822 samples
|
991 |
+
[2021-04-12 16:41:55] Starting data epoch 3 in logical epoch 3
|
992 |
+
[2021-04-12 16:41:55] [sqlite] Selecting shuffled data
|
993 |
+
[2021-04-12 17:04:47] Ep. 3 : Up. 230000 : Sen. 1,324,712 : Cost 0.31791928 * 1,546,400,571 @ 27,844 after 35,415,296,082 : Time 7720.20s : 25068.34 words/s : L.r. 7.9126e-05
|
994 |
+
[2021-04-12 17:04:47] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
995 |
+
[2021-04-12 17:04:48] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
996 |
+
[2021-04-12 17:04:50] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
997 |
+
[2021-04-12 17:04:56] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
998 |
+
[2021-04-12 17:04:57] [valid] Ep. 3 : Up. 230000 : perplexity : 2.19778 : new best
|
999 |
+
[2021-04-12 19:10:47] Ep. 3 : Up. 240000 : Sen. 9,250,409 : Cost 0.31746748 * 1,532,637,149 @ 31,358 after 36,947,933,231 : Time 7559.74s : 25399.67 words/s : L.r. 7.7460e-05
|
1000 |
+
[2021-04-12 19:10:47] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
1001 |
+
[2021-04-12 19:10:49] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
1002 |
+
[2021-04-12 19:10:50] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
1003 |
+
[2021-04-12 19:10:57] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
1004 |
+
[2021-04-12 19:10:58] [valid] Ep. 3 : Up. 240000 : perplexity : 2.19337 : new best
|
1005 |
+
[2021-04-12 21:16:59] Ep. 3 : Up. 250000 : Sen. 17,210,132 : Cost 0.31740338 * 1,537,883,637 @ 28,824 after 38,485,816,868 : Time 7572.71s : 25453.29 words/s : L.r. 7.5895e-05
|
1006 |
+
[2021-04-12 21:16:59] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
1007 |
+
[2021-04-12 21:17:01] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
1008 |
+
[2021-04-12 21:17:03] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
1009 |
+
[2021-04-12 21:17:09] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
1010 |
+
[2021-04-12 21:17:10] [valid] Ep. 3 : Up. 250000 : perplexity : 2.19031 : new best
|
1011 |
+
[2021-04-12 23:22:59] Ep. 3 : Up. 260000 : Sen. 25,147,658 : Cost 0.31638047 * 1,537,429,791 @ 9,960 after 40,023,246,659 : Time 7559.84s : 25428.46 words/s : L.r. 7.4421e-05
|
1012 |
+
[2021-04-12 23:22:59] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
1013 |
+
[2021-04-12 23:23:01] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
1014 |
+
[2021-04-12 23:23:03] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
1015 |
+
[2021-04-12 23:23:10] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
1016 |
+
[2021-04-12 23:23:12] [valid] Ep. 3 : Up. 260000 : perplexity : 2.18651 : new best
|
1017 |
+
[2021-04-13 01:29:56] Ep. 3 : Up. 270000 : Sen. 33,152,000 : Cost 0.31686807 * 1,546,803,123 @ 13,477 after 41,570,049,782 : Time 7617.21s : 25453.55 words/s : L.r. 7.3030e-05
|
1018 |
+
[2021-04-13 01:29:57] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
1019 |
+
[2021-04-13 01:29:58] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
1020 |
+
[2021-04-13 01:30:01] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
1021 |
+
[2021-04-13 01:30:08] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
1022 |
+
[2021-04-13 01:30:09] [valid] Ep. 3 : Up. 270000 : perplexity : 2.18395 : new best
|
1023 |
+
[2021-04-13 03:36:03] Ep. 3 : Up. 280000 : Sen. 41,096,689 : Cost 0.31580126 * 1,538,901,399 @ 24,387 after 43,108,951,181 : Time 7566.69s : 25428.48 words/s : L.r. 7.1714e-05
|
1024 |
+
[2021-04-13 03:36:03] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
1025 |
+
[2021-04-13 03:36:05] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
1026 |
+
[2021-04-13 03:36:07] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
1027 |
+
[2021-04-13 03:36:13] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
1028 |
+
[2021-04-13 03:36:14] [valid] Ep. 3 : Up. 280000 : perplexity : 2.1804 : new best
|
1029 |
+
[2021-04-13 05:42:22] Ep. 3 : Up. 290000 : Sen. 49,055,760 : Cost 0.31599799 * 1,539,976,265 @ 23,622 after 44,648,927,446 : Time 7579.18s : 25439.95 words/s : L.r. 7.0466e-05
|
1030 |
+
[2021-04-13 05:42:22] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
1031 |
+
[2021-04-13 05:42:24] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
1032 |
+
[2021-04-13 05:42:26] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
1033 |
+
[2021-04-13 05:42:32] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
1034 |
+
[2021-04-13 05:42:33] [valid] Ep. 3 : Up. 290000 : perplexity : 2.17926 : new best
|
1035 |
+
[2021-04-13 07:48:58] Ep. 3 : Up. 300000 : Sen. 57,026,608 : Cost 0.31556690 * 1,543,156,902 @ 14,832 after 46,192,084,348 : Time 7595.09s : 25429.63 words/s : L.r. 6.9282e-05
|
1036 |
+
[2021-04-13 07:48:58] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
1037 |
+
[2021-04-13 07:49:00] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
1038 |
+
[2021-04-13 07:49:01] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
1039 |
+
[2021-04-13 07:49:08] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
1040 |
+
[2021-04-13 07:49:09] [valid] Ep. 3 : Up. 300000 : perplexity : 2.17514 : new best
|
1041 |
+
[2021-04-13 09:55:31] Ep. 3 : Up. 310000 : Sen. 65,011,920 : Cost 0.31567365 * 1,543,413,751 @ 21,824 after 47,735,498,099 : Time 7593.40s : 25471.66 words/s : L.r. 6.8155e-05
|
1042 |
+
[2021-04-13 09:55:31] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
1043 |
+
[2021-04-13 09:55:33] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
1044 |
+
[2021-04-13 09:55:35] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
1045 |
+
[2021-04-13 09:55:42] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
1046 |
+
[2021-04-13 09:55:42] [valid] Ep. 3 : Up. 310000 : perplexity : 2.1733 : new best
|
1047 |
+
[2021-04-13 12:02:17] Ep. 3 : Up. 320000 : Sen. 73,002,709 : Cost 0.31527013 * 1,543,823,232 @ 15,555 after 49,279,321,331 : Time 7605.54s : 25414.73 words/s : L.r. 6.7082e-05
|
1048 |
+
[2021-04-13 12:02:17] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
1049 |
+
[2021-04-13 12:02:19] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
1050 |
+
[2021-04-13 12:02:21] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
1051 |
+
[2021-04-13 12:02:28] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
1052 |
+
[2021-04-13 12:02:29] [valid] Ep. 3 : Up. 320000 : perplexity : 2.17204 : new best
|
1053 |
+
[2021-04-13 14:08:32] Ep. 3 : Up. 330000 : Sen. 80,947,276 : Cost 0.31495792 * 1,537,202,843 @ 22,330 after 50,816,524,174 : Time 7575.18s : 25407.78 words/s : L.r. 6.6058e-05
|
1054 |
+
[2021-04-13 14:08:32] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
1055 |
+
[2021-04-13 14:08:34] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
1056 |
+
[2021-04-13 14:08:36] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
1057 |
+
[2021-04-13 14:08:42] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
1058 |
+
[2021-04-13 14:08:43] [valid] Ep. 3 : Up. 330000 : perplexity : 2.17051 : new best
|
1059 |
+
[2021-04-13 16:15:29] Ep. 3 : Up. 340000 : Sen. 88,952,868 : Cost 0.31548429 * 1,544,234,194 @ 20,283 after 52,360,758,368 : Time 7616.75s : 25446.45 words/s : L.r. 6.5079e-05
|
1060 |
+
[2021-04-13 16:15:29] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
1061 |
+
[2021-04-13 16:15:31] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
1062 |
+
[2021-04-13 16:15:33] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
1063 |
+
[2021-04-13 16:15:40] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
1064 |
+
[2021-04-13 16:15:41] [valid] Ep. 3 : Up. 340000 : perplexity : 2.16648 : new best
|
1065 |
+
[2021-04-13 16:46:46] Seen 90910822 samples
|
1066 |
+
[2021-04-13 16:46:46] Starting data epoch 4 in logical epoch 4
|
1067 |
+
[2021-04-13 16:46:46] [sqlite] Selecting shuffled data
|
1068 |
+
[2021-04-13 18:23:07] Ep. 4 : Up. 350000 : Sen. 5,952,000 : Cost 0.31392208 * 1,531,321,712 @ 9,800 after 53,892,080,080 : Time 7657.78s : 25022.05 words/s : L.r. 6.4143e-05
|
1069 |
+
[2021-04-13 18:23:07] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.orig.npz
|
1070 |
+
[2021-04-13 18:23:09] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz
|
1071 |
+
[2021-04-13 18:23:11] Saving Adam parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.optimizer.npz
|
1072 |
+
[2021-04-13 18:23:18] Saving model weights and runtime parameters to /users/tiedeman/research/Opus-MT-train/work-tatoeba/eng-rus/opus+bt.spm32k-spm32k.transformer-align.model1.npz.best-perplexity.npz
|
1073 |
+
[2021-04-13 18:23:19] [valid] Ep. 4 : Up. 350000 : perplexity : 2.16644 : new best
|
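The learning rates reported in the training log above (`L.r. 2.1909e-04` at update 30,000 down to `6.4143e-05` at update 350,000) follow from the flags `--learn-rate 0.0003 --lr-warmup 16000 --lr-decay-inv-sqrt 16000`: after the 16,000-update warm-up, the base rate is scaled by the inverse square root of the update count. A small sketch (not part of the original log) that reproduces the logged values under that assumption:

```python
# Sketch: reproduce the learning rates reported in the training log above from
# --learn-rate 0.0003 --lr-warmup 16000 --lr-decay-inv-sqrt 16000.
# Assumption: linear warm-up followed by inverse-square-root decay, both
# anchored at 16,000 updates, which matches every "L.r." value printed above.

def marian_lr(update, base=3e-4, warmup=16000, decay_start=16000):
    ramp = min(1.0, update / warmup)                           # linear warm-up
    decay = (decay_start / max(update, decay_start)) ** 0.5    # inverse-sqrt decay
    return base * ramp * decay

for up in (30000, 40000, 50000, 350000):
    print(up, f"{marian_lr(up):.4e}")
# 30000  2.1909e-04   (log: 2.1909e-04)
# 40000  1.8974e-04   (log: 1.8974e-04)
# 50000  1.6971e-04   (log: 1.6971e-04)
# 350000 6.4143e-05   (log: 6.4143e-05)
```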
opus+bt.spm32k-spm32k.transformer-align.valid1.log
ADDED
@@ -0,0 +1,35 @@
|
1 |
+
[2021-03-29 00:40:17] [valid] Ep. 1 : Up. 10000 : perplexity : 6.61571 : new best
|
2 |
+
[2021-04-02 22:25:48] [valid] Ep. 1 : Up. 20000 : perplexity : 3.20918 : new best
|
3 |
+
[2021-04-10 22:54:07] [valid] Ep. 1 : Up. 30000 : perplexity : 2.74846 : new best
|
4 |
+
[2021-04-11 01:00:28] [valid] Ep. 1 : Up. 40000 : perplexity : 2.57709 : new best
|
5 |
+
[2021-04-11 03:06:42] [valid] Ep. 1 : Up. 50000 : perplexity : 2.48823 : new best
|
6 |
+
[2021-04-11 05:13:14] [valid] Ep. 1 : Up. 60000 : perplexity : 2.42648 : new best
|
7 |
+
[2021-04-11 07:19:26] [valid] Ep. 1 : Up. 70000 : perplexity : 2.38283 : new best
|
8 |
+
[2021-04-11 09:25:59] [valid] Ep. 1 : Up. 80000 : perplexity : 2.35194 : new best
|
9 |
+
[2021-04-11 11:32:15] [valid] Ep. 1 : Up. 90000 : perplexity : 2.3261 : new best
|
10 |
+
[2021-04-11 13:38:52] [valid] Ep. 1 : Up. 100000 : perplexity : 2.30587 : new best
|
11 |
+
[2021-04-11 15:45:17] [valid] Ep. 1 : Up. 110000 : perplexity : 2.28881 : new best
|
12 |
+
[2021-04-11 17:53:16] [valid] Ep. 2 : Up. 120000 : perplexity : 2.27459 : new best
|
13 |
+
[2021-04-11 19:59:43] [valid] Ep. 2 : Up. 130000 : perplexity : 2.2625 : new best
|
14 |
+
[2021-04-11 22:06:10] [valid] Ep. 2 : Up. 140000 : perplexity : 2.25244 : new best
|
15 |
+
[2021-04-12 00:12:21] [valid] Ep. 2 : Up. 150000 : perplexity : 2.24363 : new best
|
16 |
+
[2021-04-12 02:18:30] [valid] Ep. 2 : Up. 160000 : perplexity : 2.23516 : new best
|
17 |
+
[2021-04-12 04:24:52] [valid] Ep. 2 : Up. 170000 : perplexity : 2.22792 : new best
|
18 |
+
[2021-04-12 06:30:40] [valid] Ep. 2 : Up. 180000 : perplexity : 2.22167 : new best
|
19 |
+
[2021-04-12 08:37:06] [valid] Ep. 2 : Up. 190000 : perplexity : 2.2181 : new best
|
20 |
+
[2021-04-12 10:43:26] [valid] Ep. 2 : Up. 200000 : perplexity : 2.21232 : new best
|
21 |
+
[2021-04-12 12:50:07] [valid] Ep. 2 : Up. 210000 : perplexity : 2.20852 : new best
|
22 |
+
[2021-04-12 14:56:17] [valid] Ep. 2 : Up. 220000 : perplexity : 2.2026 : new best
|
23 |
+
[2021-04-12 17:04:57] [valid] Ep. 3 : Up. 230000 : perplexity : 2.19778 : new best
|
24 |
+
[2021-04-12 19:10:58] [valid] Ep. 3 : Up. 240000 : perplexity : 2.19337 : new best
|
25 |
+
[2021-04-12 21:17:10] [valid] Ep. 3 : Up. 250000 : perplexity : 2.19031 : new best
|
26 |
+
[2021-04-12 23:23:12] [valid] Ep. 3 : Up. 260000 : perplexity : 2.18651 : new best
|
27 |
+
[2021-04-13 01:30:09] [valid] Ep. 3 : Up. 270000 : perplexity : 2.18395 : new best
|
28 |
+
[2021-04-13 03:36:14] [valid] Ep. 3 : Up. 280000 : perplexity : 2.1804 : new best
|
29 |
+
[2021-04-13 05:42:33] [valid] Ep. 3 : Up. 290000 : perplexity : 2.17926 : new best
|
30 |
+
[2021-04-13 07:49:09] [valid] Ep. 3 : Up. 300000 : perplexity : 2.17514 : new best
|
31 |
+
[2021-04-13 09:55:42] [valid] Ep. 3 : Up. 310000 : perplexity : 2.1733 : new best
|
32 |
+
[2021-04-13 12:02:29] [valid] Ep. 3 : Up. 320000 : perplexity : 2.17204 : new best
|
33 |
+
[2021-04-13 14:08:43] [valid] Ep. 3 : Up. 330000 : perplexity : 2.17051 : new best
|
34 |
+
[2021-04-13 16:15:41] [valid] Ep. 3 : Up. 340000 : perplexity : 2.16648 : new best
|
35 |
+
[2021-04-13 18:23:19] [valid] Ep. 4 : Up. 350000 : perplexity : 2.16644 : new best
|
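Every entry in the validation log above is a new best perplexity, so the `--early-stopping 15` patience counter (15 consecutive validations without improvement) is never incremented in this excerpt. A hypothetical sketch for computing the stall count from a log in this format (the filename is from this commit; the parsing is an assumption):

```python
# Sketch: count consecutive non-improving validations in a Marian valid log
# whose lines look like "[...] [valid] Ep. E : Up. U : perplexity : P : new best".
# With --early-stopping 15, training would stop after 15 stalls in a row.
import re

def max_stall(path="opus+bt.spm32k-spm32k.transformer-align.valid1.log"):
    best, stall, worst_stall = float("inf"), 0, 0
    pattern = re.compile(r"perplexity : ([0-9.]+)")
    with open(path) as fh:
        for line in fh:
            m = pattern.search(line)
            if not m:
                continue
            ppl = float(m.group(1))
            stall = 0 if ppl < best else stall + 1
            best = min(best, ppl)
            worst_stall = max(worst_stall, stall)
    return worst_stall  # 0 for the 35 entries shown here: every step is a new best
```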
pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:e1b41a3f3c3e2e889cb25b1e3836145a6464c5669caa360581ebe5bd0043c381
|
3 |
+
size 213160003
|
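`pytorch_model.bin` is stored as a Git LFS pointer: the three lines above give the spec version, the sha256 of the actual payload, and its size in bytes (213,160,003, roughly 203 MiB). A minimal sketch for checking a downloaded copy against the pointer (the local path is an assumption):

```python
# Sketch: verify a downloaded pytorch_model.bin against the LFS pointer above.
# Expected digest and byte size are taken verbatim from the pointer file.
import hashlib
import os

EXPECTED_SHA256 = "e1b41a3f3c3e2e889cb25b1e3836145a6464c5669caa360581ebe5bd0043c381"
EXPECTED_SIZE = 213160003

def verify(path="pytorch_model.bin"):
    assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"
    digest = hashlib.sha256()
    with open(path, "rb") as fh:
        for chunk in iter(lambda: fh.read(1 << 20), b""):  # 1 MiB chunks
            digest.update(chunk)
    assert digest.hexdigest() == EXPECTED_SHA256, "sha256 mismatch"
    return True
```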
special_tokens_map.json
ADDED
@@ -0,0 +1,5 @@
|
1 |
+
{
|
2 |
+
"eos_token": "</s>",
|
3 |
+
"pad_token": "<pad>",
|
4 |
+
"unk_token": "<unk>"
|
5 |
+
}
|
tokenizer_config.json
ADDED
@@ -0,0 +1,13 @@
|
1 |
+
{
|
2 |
+
"eos_token": "</s>",
|
3 |
+
"model_max_length": 512,
|
4 |
+
"name_or_path": "models/opus-mt-tc-base-en-ru",
|
5 |
+
"pad_token": "<pad>",
|
6 |
+
"separate_vocabs": false,
|
7 |
+
"source_lang": "opus-mt-tc-base-en",
|
8 |
+
"sp_model_kwargs": {},
|
9 |
+
"special_tokens_map_file": null,
|
10 |
+
"target_lang": "ru",
|
11 |
+
"tokenizer_class": "MarianTokenizer",
|
12 |
+
"unk_token": "<unk>"
|
13 |
+
}
|
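The tokenizer configuration above declares `MarianTokenizer` with a 512-token limit, and its companion `MarianMTModel` class is the usual way to load such a checkpoint in Hugging Face Transformers. A minimal usage sketch, assuming the commit's files, plus the SentencePiece models the tokenizer needs (not shown in this commit's file list), are saved together in one local directory (the directory name is a placeholder):

```python
# Sketch: load the files from this commit with Hugging Face Transformers and
# translate one English sentence into Russian. "path/to/checkpoint" is a
# placeholder for wherever config.json, pytorch_model.bin, vocab.json,
# tokenizer_config.json, special_tokens_map.json and the SentencePiece
# model files are stored.
from transformers import MarianMTModel, MarianTokenizer

checkpoint = "path/to/checkpoint"
tokenizer = MarianTokenizer.from_pretrained(checkpoint)
model = MarianMTModel.from_pretrained(checkpoint)

batch = tokenizer(["This is a test."], return_tensors="pt", padding=True)
generated = model.generate(**batch)
print(tokenizer.batch_decode(generated, skip_special_tokens=True))
```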
vocab.json
ADDED
The diff for this file is too large to render.
See raw diff
|