---
library_name: transformers
language:
- anp
- as
- awa
- bho
- bn
- bpy
- de
- dv
- en
- es
- fr
- gbm
- gu
- hi
- hif
- hne
- hns
- kok
- ks
- lah
- mag
- mai
- mr
- ne
- or
- pa
- pi
- pt
- rhg
- rmy
- rom
- sa
- sd
- si
- skr
- syl
- ur
tags:
- translation
- opus-mt-tc-bible
license: apache-2.0
model-index:
- name: opus-mt-tc-bible-big-inc-deu_eng_fra_por_spa
results:
- task:
name: Translation asm-eng
type: translation
args: asm-eng
dataset:
name: flores200-devtest
type: flores200-devtest
args: asm-eng
metrics:
- name: BLEU
type: bleu
value: 21.9
- name: chr-F
type: chrf
value: 0.48584
- task:
name: Translation asm-por
type: translation
args: asm-por
dataset:
name: flores200-devtest
type: flores200-devtest
args: asm-por
metrics:
- name: BLEU
type: bleu
value: 10.1
- name: chr-F
type: chrf
value: 0.35028
- task:
name: Translation awa-deu
type: translation
args: awa-deu
dataset:
name: flores200-devtest
type: flores200-devtest
args: awa-deu
metrics:
- name: BLEU
type: bleu
value: 16.5
- name: chr-F
type: chrf
value: 0.47173
- task:
name: Translation awa-eng
type: translation
args: awa-eng
dataset:
name: flores200-devtest
type: flores200-devtest
args: awa-eng
metrics:
- name: BLEU
type: bleu
value: 24.5
- name: chr-F
type: chrf
value: 0.50582
- task:
name: Translation awa-fra
type: translation
args: awa-fra
dataset:
name: flores200-devtest
type: flores200-devtest
args: awa-fra
metrics:
- name: BLEU
type: bleu
value: 21.4
- name: chr-F
type: chrf
value: 0.49682
- task:
name: Translation awa-por
type: translation
args: awa-por
dataset:
name: flores200-devtest
type: flores200-devtest
args: awa-por
metrics:
- name: BLEU
type: bleu
value: 21.5
- name: chr-F
type: chrf
value: 0.49663
- task:
name: Translation awa-spa
type: translation
args: awa-spa
dataset:
name: flores200-devtest
type: flores200-devtest
args: awa-spa
metrics:
- name: BLEU
type: bleu
value: 15.1
- name: chr-F
type: chrf
value: 0.43740
- task:
name: Translation ben-deu
type: translation
args: ben-deu
dataset:
name: flores200-devtest
type: flores200-devtest
args: ben-deu
metrics:
- name: BLEU
type: bleu
value: 16.6
- name: chr-F
type: chrf
value: 0.47330
- task:
name: Translation ben-eng
type: translation
args: ben-eng
dataset:
name: flores200-devtest
type: flores200-devtest
args: ben-eng
metrics:
- name: BLEU
type: bleu
value: 30.5
- name: chr-F
type: chrf
value: 0.58077
- task:
name: Translation ben-fra
type: translation
args: ben-fra
dataset:
name: flores200-devtest
type: flores200-devtest
args: ben-fra
metrics:
- name: BLEU
type: bleu
value: 22.6
- name: chr-F
type: chrf
value: 0.50884
- task:
name: Translation ben-por
type: translation
args: ben-por
dataset:
name: flores200-devtest
type: flores200-devtest
args: ben-por
metrics:
- name: BLEU
type: bleu
value: 21.4
- name: chr-F
type: chrf
value: 0.50054
- task:
name: Translation ben-spa
type: translation
args: ben-spa
dataset:
name: flores200-devtest
type: flores200-devtest
args: ben-spa
metrics:
- name: BLEU
type: bleu
value: 15.2
- name: chr-F
type: chrf
value: 0.44159
- task:
name: Translation bho-deu
type: translation
args: bho-deu
dataset:
name: flores200-devtest
type: flores200-devtest
args: bho-deu
metrics:
- name: BLEU
type: bleu
value: 12.6
- name: chr-F
type: chrf
value: 0.42660
- task:
name: Translation bho-eng
type: translation
args: bho-eng
dataset:
name: flores200-devtest
type: flores200-devtest
args: bho-eng
metrics:
- name: BLEU
type: bleu
value: 22.7
- name: chr-F
type: chrf
value: 0.50609
- task:
name: Translation bho-fra
type: translation
args: bho-fra
dataset:
name: flores200-devtest
type: flores200-devtest
args: bho-fra
metrics:
- name: BLEU
type: bleu
value: 16.8
- name: chr-F
type: chrf
value: 0.44889
- task:
name: Translation bho-por
type: translation
args: bho-por
dataset:
name: flores200-devtest
type: flores200-devtest
args: bho-por
metrics:
- name: BLEU
type: bleu
value: 16.9
- name: chr-F
type: chrf
value: 0.44582
- task:
name: Translation bho-spa
type: translation
args: bho-spa
dataset:
name: flores200-devtest
type: flores200-devtest
args: bho-spa
metrics:
- name: BLEU
type: bleu
value: 13.1
- name: chr-F
type: chrf
value: 0.40581
- task:
name: Translation guj-deu
type: translation
args: guj-deu
dataset:
name: flores200-devtest
type: flores200-devtest
args: guj-deu
metrics:
- name: BLEU
type: bleu
value: 16.8
- name: chr-F
type: chrf
value: 0.46665
- task:
name: Translation guj-eng
type: translation
args: guj-eng
dataset:
name: flores200-devtest
type: flores200-devtest
args: guj-eng
metrics:
- name: BLEU
type: bleu
value: 34.3
- name: chr-F
type: chrf
value: 0.61383
- task:
name: Translation guj-fra
type: translation
args: guj-fra
dataset:
name: flores200-devtest
type: flores200-devtest
args: guj-fra
metrics:
- name: BLEU
type: bleu
value: 22.3
- name: chr-F
type: chrf
value: 0.50410
- task:
name: Translation guj-por
type: translation
args: guj-por
dataset:
name: flores200-devtest
type: flores200-devtest
args: guj-por
metrics:
- name: BLEU
type: bleu
value: 21.3
- name: chr-F
type: chrf
value: 0.49257
- task:
name: Translation guj-spa
type: translation
args: guj-spa
dataset:
name: flores200-devtest
type: flores200-devtest
args: guj-spa
metrics:
- name: BLEU
type: bleu
value: 15.6
- name: chr-F
type: chrf
value: 0.44565
- task:
name: Translation hin-deu
type: translation
args: hin-deu
dataset:
name: flores200-devtest
type: flores200-devtest
args: hin-deu
metrics:
- name: BLEU
type: bleu
value: 20.4
- name: chr-F
type: chrf
value: 0.50226
- task:
name: Translation hin-eng
type: translation
args: hin-eng
dataset:
name: flores200-devtest
type: flores200-devtest
args: hin-eng
metrics:
- name: BLEU
type: bleu
value: 37.3
- name: chr-F
type: chrf
value: 0.63336
- task:
name: Translation hin-fra
type: translation
args: hin-fra
dataset:
name: flores200-devtest
type: flores200-devtest
args: hin-fra
metrics:
- name: BLEU
type: bleu
value: 25.9
- name: chr-F
type: chrf
value: 0.53701
- task:
name: Translation hin-por
type: translation
args: hin-por
dataset:
name: flores200-devtest
type: flores200-devtest
args: hin-por
metrics:
- name: BLEU
type: bleu
value: 25.5
- name: chr-F
type: chrf
value: 0.53448
- task:
name: Translation hin-spa
type: translation
args: hin-spa
dataset:
name: flores200-devtest
type: flores200-devtest
args: hin-spa
metrics:
- name: BLEU
type: bleu
value: 17.2
- name: chr-F
type: chrf
value: 0.46171
- task:
name: Translation hne-deu
type: translation
args: hne-deu
dataset:
name: flores200-devtest
type: flores200-devtest
args: hne-deu
metrics:
- name: BLEU
type: bleu
value: 19.0
- name: chr-F
type: chrf
value: 0.49698
- task:
name: Translation hne-eng
type: translation
args: hne-eng
dataset:
name: flores200-devtest
type: flores200-devtest
args: hne-eng
metrics:
- name: BLEU
type: bleu
value: 38.5
- name: chr-F
type: chrf
value: 0.63936
- task:
name: Translation hne-fra
type: translation
args: hne-fra
dataset:
name: flores200-devtest
type: flores200-devtest
args: hne-fra
metrics:
- name: BLEU
type: bleu
value: 25.3
- name: chr-F
type: chrf
value: 0.52835
- task:
name: Translation hne-por
type: translation
args: hne-por
dataset:
name: flores200-devtest
type: flores200-devtest
args: hne-por
metrics:
- name: BLEU
type: bleu
value: 25.0
- name: chr-F
type: chrf
value: 0.52788
- task:
name: Translation hne-spa
type: translation
args: hne-spa
dataset:
name: flores200-devtest
type: flores200-devtest
args: hne-spa
metrics:
- name: BLEU
type: bleu
value: 16.7
- name: chr-F
type: chrf
value: 0.45443
- task:
name: Translation mag-deu
type: translation
args: mag-deu
dataset:
name: flores200-devtest
type: flores200-devtest
args: mag-deu
metrics:
- name: BLEU
type: bleu
value: 19.7
- name: chr-F
type: chrf
value: 0.50359
- task:
name: Translation mag-eng
type: translation
args: mag-eng
dataset:
name: flores200-devtest
type: flores200-devtest
args: mag-eng
metrics:
- name: BLEU
type: bleu
value: 38.0
- name: chr-F
type: chrf
value: 0.63906
- task:
name: Translation mag-fra
type: translation
args: mag-fra
dataset:
name: flores200-devtest
type: flores200-devtest
args: mag-fra
metrics:
- name: BLEU
type: bleu
value: 25.8
- name: chr-F
type: chrf
value: 0.53616
- task:
name: Translation mag-por
type: translation
args: mag-por
dataset:
name: flores200-devtest
type: flores200-devtest
args: mag-por
metrics:
- name: BLEU
type: bleu
value: 25.9
- name: chr-F
type: chrf
value: 0.53537
- task:
name: Translation mag-spa
type: translation
args: mag-spa
dataset:
name: flores200-devtest
type: flores200-devtest
args: mag-spa
metrics:
- name: BLEU
type: bleu
value: 16.9
- name: chr-F
type: chrf
value: 0.45822
- task:
name: Translation mai-deu
type: translation
args: mai-deu
dataset:
name: flores200-devtest
type: flores200-devtest
args: mai-deu
metrics:
- name: BLEU
type: bleu
value: 16.2
- name: chr-F
type: chrf
value: 0.46791
- task:
name: Translation mai-eng
type: translation
args: mai-eng
dataset:
name: flores200-devtest
type: flores200-devtest
args: mai-eng
metrics:
- name: BLEU
type: bleu
value: 30.4
- name: chr-F
type: chrf
value: 0.57461
- task:
name: Translation mai-fra
type: translation
args: mai-fra
dataset:
name: flores200-devtest
type: flores200-devtest
args: mai-fra
metrics:
- name: BLEU
type: bleu
value: 22.1
- name: chr-F
type: chrf
value: 0.50585
- task:
name: Translation mai-por
type: translation
args: mai-por
dataset:
name: flores200-devtest
type: flores200-devtest
args: mai-por
metrics:
- name: BLEU
type: bleu
value: 22.0
- name: chr-F
type: chrf
value: 0.50490
- task:
name: Translation mai-spa
type: translation
args: mai-spa
dataset:
name: flores200-devtest
type: flores200-devtest
args: mai-spa
metrics:
- name: BLEU
type: bleu
value: 15.3
- name: chr-F
type: chrf
value: 0.44366
- task:
name: Translation mar-deu
type: translation
args: mar-deu
dataset:
name: flores200-devtest
type: flores200-devtest
args: mar-deu
metrics:
- name: BLEU
type: bleu
value: 14.5
- name: chr-F
type: chrf
value: 0.44725
- task:
name: Translation mar-eng
type: translation
args: mar-eng
dataset:
name: flores200-devtest
type: flores200-devtest
args: mar-eng
metrics:
- name: BLEU
type: bleu
value: 31.4
- name: chr-F
type: chrf
value: 0.58500
- task:
name: Translation mar-fra
type: translation
args: mar-fra
dataset:
name: flores200-devtest
type: flores200-devtest
args: mar-fra
metrics:
- name: BLEU
type: bleu
value: 19.5
- name: chr-F
type: chrf
value: 0.47027
- task:
name: Translation mar-por
type: translation
args: mar-por
dataset:
name: flores200-devtest
type: flores200-devtest
args: mar-por
metrics:
- name: BLEU
type: bleu
value: 19.3
- name: chr-F
type: chrf
value: 0.47216
- task:
name: Translation mar-spa
type: translation
args: mar-spa
dataset:
name: flores200-devtest
type: flores200-devtest
args: mar-spa
metrics:
- name: BLEU
type: bleu
value: 14.2
- name: chr-F
type: chrf
value: 0.42178
- task:
name: Translation npi-deu
type: translation
args: npi-deu
dataset:
name: flores200-devtest
type: flores200-devtest
args: npi-deu
metrics:
- name: BLEU
type: bleu
value: 16.4
- name: chr-F
type: chrf
value: 0.46631
- task:
name: Translation npi-eng
type: translation
args: npi-eng
dataset:
name: flores200-devtest
type: flores200-devtest
args: npi-eng
metrics:
- name: BLEU
type: bleu
value: 32.3
- name: chr-F
type: chrf
value: 0.59776
- task:
name: Translation npi-fra
type: translation
args: npi-fra
dataset:
name: flores200-devtest
type: flores200-devtest
args: npi-fra
metrics:
- name: BLEU
type: bleu
value: 22.5
- name: chr-F
type: chrf
value: 0.50548
- task:
name: Translation npi-por
type: translation
args: npi-por
dataset:
name: flores200-devtest
type: flores200-devtest
args: npi-por
metrics:
- name: BLEU
type: bleu
value: 21.7
- name: chr-F
type: chrf
value: 0.50202
- task:
name: Translation npi-spa
type: translation
args: npi-spa
dataset:
name: flores200-devtest
type: flores200-devtest
args: npi-spa
metrics:
- name: BLEU
type: bleu
value: 15.3
- name: chr-F
type: chrf
value: 0.43804
- task:
name: Translation pan-deu
type: translation
args: pan-deu
dataset:
name: flores200-devtest
type: flores200-devtest
args: pan-deu
metrics:
- name: BLEU
type: bleu
value: 18.7
- name: chr-F
type: chrf
value: 0.48421
- task:
name: Translation pan-eng
type: translation
args: pan-eng
dataset:
name: flores200-devtest
type: flores200-devtest
args: pan-eng
metrics:
- name: BLEU
type: bleu
value: 33.8
- name: chr-F
type: chrf
value: 0.60676
- task:
name: Translation pan-fra
type: translation
args: pan-fra
dataset:
name: flores200-devtest
type: flores200-devtest
args: pan-fra
metrics:
- name: BLEU
type: bleu
value: 23.5
- name: chr-F
type: chrf
value: 0.51368
- task:
name: Translation pan-por
type: translation
args: pan-por
dataset:
name: flores200-devtest
type: flores200-devtest
args: pan-por
metrics:
- name: BLEU
type: bleu
value: 22.7
- name: chr-F
type: chrf
value: 0.50586
- task:
name: Translation pan-spa
type: translation
args: pan-spa
dataset:
name: flores200-devtest
type: flores200-devtest
args: pan-spa
metrics:
- name: BLEU
type: bleu
value: 16.5
- name: chr-F
type: chrf
value: 0.44653
- task:
name: Translation san-eng
type: translation
args: san-eng
dataset:
name: flores200-devtest
type: flores200-devtest
args: san-eng
metrics:
- name: BLEU
type: bleu
value: 11.8
- name: chr-F
type: chrf
value: 0.36887
- task:
name: Translation sin-deu
type: translation
args: sin-deu
dataset:
name: flores200-devtest
type: flores200-devtest
args: sin-deu
metrics:
- name: BLEU
type: bleu
value: 14.2
- name: chr-F
type: chrf
value: 0.44676
- task:
name: Translation sin-eng
type: translation
args: sin-eng
dataset:
name: flores200-devtest
type: flores200-devtest
args: sin-eng
metrics:
- name: BLEU
type: bleu
value: 26.8
- name: chr-F
type: chrf
value: 0.54777
- task:
name: Translation sin-fra
type: translation
args: sin-fra
dataset:
name: flores200-devtest
type: flores200-devtest
args: sin-fra
metrics:
- name: BLEU
type: bleu
value: 19.0
- name: chr-F
type: chrf
value: 0.47283
- task:
name: Translation sin-por
type: translation
args: sin-por
dataset:
name: flores200-devtest
type: flores200-devtest
args: sin-por
metrics:
- name: BLEU
type: bleu
value: 18.4
- name: chr-F
type: chrf
value: 0.46935
- task:
name: Translation sin-spa
type: translation
args: sin-spa
dataset:
name: flores200-devtest
type: flores200-devtest
args: sin-spa
metrics:
- name: BLEU
type: bleu
value: 13.7
- name: chr-F
type: chrf
value: 0.42143
- task:
name: Translation urd-deu
type: translation
args: urd-deu
dataset:
name: flores200-devtest
type: flores200-devtest
args: urd-deu
metrics:
- name: BLEU
type: bleu
value: 17.1
- name: chr-F
type: chrf
value: 0.46542
- task:
name: Translation urd-eng
type: translation
args: urd-eng
dataset:
name: flores200-devtest
type: flores200-devtest
args: urd-eng
metrics:
- name: BLEU
type: bleu
value: 29.3
- name: chr-F
type: chrf
value: 0.56935
- task:
name: Translation urd-fra
type: translation
args: urd-fra
dataset:
name: flores200-devtest
type: flores200-devtest
args: urd-fra
metrics:
- name: BLEU
type: bleu
value: 22.3
- name: chr-F
type: chrf
value: 0.50276
- task:
name: Translation urd-por
type: translation
args: urd-por
dataset:
name: flores200-devtest
type: flores200-devtest
args: urd-por
metrics:
- name: BLEU
type: bleu
value: 20.3
- name: chr-F
type: chrf
value: 0.48010
- task:
name: Translation urd-spa
type: translation
args: urd-spa
dataset:
name: flores200-devtest
type: flores200-devtest
args: urd-spa
metrics:
- name: BLEU
type: bleu
value: 14.7
- name: chr-F
type: chrf
value: 0.43032
- task:
name: Translation asm-por
type: translation
args: asm-por
dataset:
name: flores101-devtest
type: flores_101
args: asm por devtest
metrics:
- name: BLEU
type: bleu
value: 10.0
- name: chr-F
type: chrf
value: 0.34689
- task:
name: Translation ben-eng
type: translation
args: ben-eng
dataset:
name: flores101-devtest
type: flores_101
args: ben eng devtest
metrics:
- name: BLEU
type: bleu
value: 30.4
- name: chr-F
type: chrf
value: 0.57906
- task:
name: Translation ben-fra
type: translation
args: ben-fra
dataset:
name: flores101-devtest
type: flores_101
args: ben fra devtest
metrics:
- name: BLEU
type: bleu
value: 21.9
- name: chr-F
type: chrf
value: 0.50109
- task:
name: Translation guj-spa
type: translation
args: guj-spa
dataset:
name: flores101-devtest
type: flores_101
args: guj spa devtest
metrics:
- name: BLEU
type: bleu
value: 15.2
- name: chr-F
type: chrf
value: 0.44065
- task:
name: Translation mar-deu
type: translation
args: mar-deu
dataset:
name: flores101-devtest
type: flores_101
args: mar deu devtest
metrics:
- name: BLEU
type: bleu
value: 13.8
- name: chr-F
type: chrf
value: 0.44067
- task:
name: Translation mar-por
type: translation
args: mar-por
dataset:
name: flores101-devtest
type: flores_101
args: mar por devtest
metrics:
- name: BLEU
type: bleu
value: 18.6
- name: chr-F
type: chrf
value: 0.46685
- task:
name: Translation mar-spa
type: translation
args: mar-spa
dataset:
name: flores101-devtest
type: flores_101
args: mar spa devtest
metrics:
- name: BLEU
type: bleu
value: 14.0
- name: chr-F
type: chrf
value: 0.41662
- task:
name: Translation pan-eng
type: translation
args: pan-eng
dataset:
name: flores101-devtest
type: flores_101
args: pan eng devtest
metrics:
- name: BLEU
type: bleu
value: 33.0
- name: chr-F
type: chrf
value: 0.59922
- task:
name: Translation pan-por
type: translation
args: pan-por
dataset:
name: flores101-devtest
type: flores_101
args: pan por devtest
metrics:
- name: BLEU
type: bleu
value: 21.9
- name: chr-F
type: chrf
value: 0.49373
- task:
name: Translation pan-spa
type: translation
args: pan-spa
dataset:
name: flores101-devtest
type: flores_101
args: pan spa devtest
metrics:
- name: BLEU
type: bleu
value: 15.4
- name: chr-F
type: chrf
value: 0.43910
- task:
name: Translation ben-deu
type: translation
args: ben-deu
dataset:
name: ntrex128
type: ntrex128
args: ben-deu
metrics:
- name: BLEU
type: bleu
value: 14.6
- name: chr-F
type: chrf
value: 0.45180
- task:
name: Translation ben-eng
type: translation
args: ben-eng
dataset:
name: ntrex128
type: ntrex128
args: ben-eng
metrics:
- name: BLEU
type: bleu
value: 29.5
- name: chr-F
type: chrf
value: 0.57247
- task:
name: Translation ben-fra
type: translation
args: ben-fra
dataset:
name: ntrex128
type: ntrex128
args: ben-fra
metrics:
- name: BLEU
type: bleu
value: 18.0
- name: chr-F
type: chrf
value: 0.46475
- task:
name: Translation ben-por
type: translation
args: ben-por
dataset:
name: ntrex128
type: ntrex128
args: ben-por
metrics:
- name: BLEU
type: bleu
value: 16.8
- name: chr-F
type: chrf
value: 0.45486
- task:
name: Translation ben-spa
type: translation
args: ben-spa
dataset:
name: ntrex128
type: ntrex128
args: ben-spa
metrics:
- name: BLEU
type: bleu
value: 21.1
- name: chr-F
type: chrf
value: 0.48738
- task:
name: Translation guj-deu
type: translation
args: guj-deu
dataset:
name: ntrex128
type: ntrex128
args: guj-deu
metrics:
- name: BLEU
type: bleu
value: 13.9
- name: chr-F
type: chrf
value: 0.43539
- task:
name: Translation guj-eng
type: translation
args: guj-eng
dataset:
name: ntrex128
type: ntrex128
args: guj-eng
metrics:
- name: BLEU
type: bleu
value: 31.6
- name: chr-F
type: chrf
value: 0.58894
- task:
name: Translation guj-fra
type: translation
args: guj-fra
dataset:
name: ntrex128
type: ntrex128
args: guj-fra
metrics:
- name: BLEU
type: bleu
value: 16.9
- name: chr-F
type: chrf
value: 0.45075
- task:
name: Translation guj-por
type: translation
args: guj-por
dataset:
name: ntrex128
type: ntrex128
args: guj-por
metrics:
- name: BLEU
type: bleu
value: 15.2
- name: chr-F
type: chrf
value: 0.43567
- task:
name: Translation guj-spa
type: translation
args: guj-spa
dataset:
name: ntrex128
type: ntrex128
args: guj-spa
metrics:
- name: BLEU
type: bleu
value: 20.2
- name: chr-F
type: chrf
value: 0.47525
- task:
name: Translation hin-deu
type: translation
args: hin-deu
dataset:
name: ntrex128
type: ntrex128
args: hin-deu
metrics:
- name: BLEU
type: bleu
value: 15.0
- name: chr-F
type: chrf
value: 0.46336
- task:
name: Translation hin-eng
type: translation
args: hin-eng
dataset:
name: ntrex128
type: ntrex128
args: hin-eng
metrics:
- name: BLEU
type: bleu
value: 31.5
- name: chr-F
type: chrf
value: 0.59842
- task:
name: Translation hin-fra
type: translation
args: hin-fra
dataset:
name: ntrex128
type: ntrex128
args: hin-fra
metrics:
- name: BLEU
type: bleu
value: 19.2
- name: chr-F
type: chrf
value: 0.48208
- task:
name: Translation hin-por
type: translation
args: hin-por
dataset:
name: ntrex128
type: ntrex128
args: hin-por
metrics:
- name: BLEU
type: bleu
value: 17.6
- name: chr-F
type: chrf
value: 0.46509
- task:
name: Translation hin-spa
type: translation
args: hin-spa
dataset:
name: ntrex128
type: ntrex128
args: hin-spa
metrics:
- name: BLEU
type: bleu
value: 21.8
- name: chr-F
type: chrf
value: 0.49436
- task:
name: Translation mar-deu
type: translation
args: mar-deu
dataset:
name: ntrex128
type: ntrex128
args: mar-deu
metrics:
- name: BLEU
type: bleu
value: 12.8
- name: chr-F
type: chrf
value: 0.43119
- task:
name: Translation mar-eng
type: translation
args: mar-eng
dataset:
name: ntrex128
type: ntrex128
args: mar-eng
metrics:
- name: BLEU
type: bleu
value: 27.3
- name: chr-F
type: chrf
value: 0.55151
- task:
name: Translation mar-fra
type: translation
args: mar-fra
dataset:
name: ntrex128
type: ntrex128
args: mar-fra
metrics:
- name: BLEU
type: bleu
value: 16.2
- name: chr-F
type: chrf
value: 0.43957
- task:
name: Translation mar-por
type: translation
args: mar-por
dataset:
name: ntrex128
type: ntrex128
args: mar-por
metrics:
- name: BLEU
type: bleu
value: 15.4
- name: chr-F
type: chrf
value: 0.43555
- task:
name: Translation mar-spa
type: translation
args: mar-spa
dataset:
name: ntrex128
type: ntrex128
args: mar-spa
metrics:
- name: BLEU
type: bleu
value: 19.1
- name: chr-F
type: chrf
value: 0.46271
- task:
name: Translation nep-deu
type: translation
args: nep-deu
dataset:
name: ntrex128
type: ntrex128
args: nep-deu
metrics:
- name: BLEU
type: bleu
value: 13.0
- name: chr-F
type: chrf
value: 0.42940
- task:
name: Translation nep-eng
type: translation
args: nep-eng
dataset:
name: ntrex128
type: ntrex128
args: nep-eng
metrics:
- name: BLEU
type: bleu
value: 29.1
- name: chr-F
type: chrf
value: 0.56277
- task:
name: Translation nep-fra
type: translation
args: nep-fra
dataset:
name: ntrex128
type: ntrex128
args: nep-fra
metrics:
- name: BLEU
type: bleu
value: 16.5
- name: chr-F
type: chrf
value: 0.44663
- task:
name: Translation nep-por
type: translation
args: nep-por
dataset:
name: ntrex128
type: ntrex128
args: nep-por
metrics:
- name: BLEU
type: bleu
value: 15.4
- name: chr-F
type: chrf
value: 0.43686
- task:
name: Translation nep-spa
type: translation
args: nep-spa
dataset:
name: ntrex128
type: ntrex128
args: nep-spa
metrics:
- name: BLEU
type: bleu
value: 19.3
- name: chr-F
type: chrf
value: 0.46553
- task:
name: Translation pan-deu
type: translation
args: pan-deu
dataset:
name: ntrex128
type: ntrex128
args: pan-deu
metrics:
- name: BLEU
type: bleu
value: 14.1
- name: chr-F
type: chrf
value: 0.44036
- task:
name: Translation pan-eng
type: translation
args: pan-eng
dataset:
name: ntrex128
type: ntrex128
args: pan-eng
metrics:
- name: BLEU
type: bleu
value: 31.6
- name: chr-F
type: chrf
value: 0.58427
- task:
name: Translation pan-fra
type: translation
args: pan-fra
dataset:
name: ntrex128
type: ntrex128
args: pan-fra
metrics:
- name: BLEU
type: bleu
value: 17.3
- name: chr-F
type: chrf
value: 0.45593
- task:
name: Translation pan-por
type: translation
args: pan-por
dataset:
name: ntrex128
type: ntrex128
args: pan-por
metrics:
- name: BLEU
type: bleu
value: 15.9
- name: chr-F
type: chrf
value: 0.44264
- task:
name: Translation pan-spa
type: translation
args: pan-spa
dataset:
name: ntrex128
type: ntrex128
args: pan-spa
metrics:
- name: BLEU
type: bleu
value: 20.0
- name: chr-F
type: chrf
value: 0.47199
- task:
name: Translation sin-deu
type: translation
args: sin-deu
dataset:
name: ntrex128
type: ntrex128
args: sin-deu
metrics:
- name: BLEU
type: bleu
value: 12.4
- name: chr-F
type: chrf
value: 0.42280
- task:
name: Translation sin-eng
type: translation
args: sin-eng
dataset:
name: ntrex128
type: ntrex128
args: sin-eng
metrics:
- name: BLEU
type: bleu
value: 24.6
- name: chr-F
type: chrf
value: 0.52576
- task:
name: Translation sin-fra
type: translation
args: sin-fra
dataset:
name: ntrex128
type: ntrex128
args: sin-fra
metrics:
- name: BLEU
type: bleu
value: 15.6
- name: chr-F
type: chrf
value: 0.43594
- task:
name: Translation sin-por
type: translation
args: sin-por
dataset:
name: ntrex128
type: ntrex128
args: sin-por
metrics:
- name: BLEU
type: bleu
value: 14.4
- name: chr-F
type: chrf
value: 0.42751
- task:
name: Translation sin-spa
type: translation
args: sin-spa
dataset:
name: ntrex128
type: ntrex128
args: sin-spa
metrics:
- name: BLEU
type: bleu
value: 18.3
- name: chr-F
type: chrf
value: 0.45890
- task:
name: Translation urd-deu
type: translation
args: urd-deu
dataset:
name: ntrex128
type: ntrex128
args: urd-deu
metrics:
- name: BLEU
type: bleu
value: 15.6
- name: chr-F
type: chrf
value: 0.45737
- task:
name: Translation urd-eng
type: translation
args: urd-eng
dataset:
name: ntrex128
type: ntrex128
args: urd-eng
metrics:
- name: BLEU
type: bleu
value: 28.6
- name: chr-F
type: chrf
value: 0.56781
- task:
name: Translation urd-fra
type: translation
args: urd-fra
dataset:
name: ntrex128
type: ntrex128
args: urd-fra
metrics:
- name: BLEU
type: bleu
value: 18.9
- name: chr-F
type: chrf
value: 0.47298
- task:
name: Translation urd-por
type: translation
args: urd-por
dataset:
name: ntrex128
type: ntrex128
args: urd-por
metrics:
- name: BLEU
type: bleu
value: 16.2
- name: chr-F
type: chrf
value: 0.45273
- task:
name: Translation urd-spa
type: translation
args: urd-spa
dataset:
name: ntrex128
type: ntrex128
args: urd-spa
metrics:
- name: BLEU
type: bleu
value: 21.0
- name: chr-F
type: chrf
value: 0.48644
- task:
name: Translation awa-eng
type: translation
args: awa-eng
dataset:
name: tatoeba-test-v2021-08-07
type: tatoeba_mt
args: awa-eng
metrics:
- name: BLEU
type: bleu
value: 40.8
- name: chr-F
type: chrf
value: 0.60390
- task:
name: Translation ben-eng
type: translation
args: ben-eng
dataset:
name: tatoeba-test-v2021-08-07
type: tatoeba_mt
args: ben-eng
metrics:
- name: BLEU
type: bleu
value: 49.4
- name: chr-F
type: chrf
value: 0.64078
- task:
name: Translation hin-eng
type: translation
args: hin-eng
dataset:
name: tatoeba-test-v2021-08-07
type: tatoeba_mt
args: hin-eng
metrics:
- name: BLEU
type: bleu
value: 49.1
- name: chr-F
type: chrf
value: 0.64929
- task:
name: Translation mar-eng
type: translation
args: mar-eng
dataset:
name: tatoeba-test-v2021-08-07
type: tatoeba_mt
args: mar-eng
metrics:
- name: BLEU
type: bleu
value: 48.0
- name: chr-F
type: chrf
value: 0.64074
- task:
name: Translation multi-multi
type: translation
args: multi-multi
dataset:
name: tatoeba-test-v2020-07-28-v2023-09-26
type: tatoeba_mt
args: multi-multi
metrics:
- name: BLEU
type: bleu
value: 44.6
- name: chr-F
type: chrf
value: 0.61360
- task:
name: Translation urd-eng
type: translation
args: urd-eng
dataset:
name: tatoeba-test-v2021-08-07
type: tatoeba_mt
args: urd-eng
metrics:
- name: BLEU
type: bleu
value: 35.0
- name: chr-F
type: chrf
value: 0.52963
- task:
name: Translation ben-eng
type: translation
args: ben-eng
dataset:
name: tico19-test
type: tico19-test
args: ben-eng
metrics:
- name: BLEU
type: bleu
value: 38.2
- name: chr-F
type: chrf
value: 0.64568
- task:
name: Translation ben-fra
type: translation
args: ben-fra
dataset:
name: tico19-test
type: tico19-test
args: ben-fra
metrics:
- name: BLEU
type: bleu
value: 22.0
- name: chr-F
type: chrf
value: 0.49799
- task:
name: Translation ben-por
type: translation
args: ben-por
dataset:
name: tico19-test
type: tico19-test
args: ben-por
metrics:
- name: BLEU
type: bleu
value: 27.2
- name: chr-F
type: chrf
value: 0.55115
- task:
name: Translation ben-spa
type: translation
args: ben-spa
dataset:
name: tico19-test
type: tico19-test
args: ben-spa
metrics:
- name: BLEU
type: bleu
value: 29.9
- name: chr-F
type: chrf
value: 0.56847
- task:
name: Translation hin-eng
type: translation
args: hin-eng
dataset:
name: tico19-test
type: tico19-test
args: hin-eng
metrics:
- name: BLEU
type: bleu
value: 46.6
- name: chr-F
type: chrf
value: 0.70694
- task:
name: Translation hin-fra
type: translation
args: hin-fra
dataset:
name: tico19-test
type: tico19-test
args: hin-fra
metrics:
- name: BLEU
type: bleu
value: 26.7
- name: chr-F
type: chrf
value: 0.53932
- task:
name: Translation hin-por
type: translation
args: hin-por
dataset:
name: tico19-test
type: tico19-test
args: hin-por
metrics:
- name: BLEU
type: bleu
value: 33.4
- name: chr-F
type: chrf
value: 0.60581
- task:
name: Translation hin-spa
type: translation
args: hin-spa
dataset:
name: tico19-test
type: tico19-test
args: hin-spa
metrics:
- name: BLEU
type: bleu
value: 35.7
- name: chr-F
type: chrf
value: 0.61585
- task:
name: Translation mar-eng
type: translation
args: mar-eng
dataset:
name: tico19-test
type: tico19-test
args: mar-eng
metrics:
- name: BLEU
type: bleu
value: 31.8
- name: chr-F
type: chrf
value: 0.59329
- task:
name: Translation mar-fra
type: translation
args: mar-fra
dataset:
name: tico19-test
type: tico19-test
args: mar-fra
metrics:
- name: BLEU
type: bleu
value: 19.3
- name: chr-F
type: chrf
value: 0.46574
- task:
name: Translation mar-por
type: translation
args: mar-por
dataset:
name: tico19-test
type: tico19-test
args: mar-por
metrics:
- name: BLEU
type: bleu
value: 23.6
- name: chr-F
type: chrf
value: 0.51463
- task:
name: Translation mar-spa
type: translation
args: mar-spa
dataset:
name: tico19-test
type: tico19-test
args: mar-spa
metrics:
- name: BLEU
type: bleu
value: 25.7
- name: chr-F
type: chrf
value: 0.52551
- task:
name: Translation nep-eng
type: translation
args: nep-eng
dataset:
name: tico19-test
type: tico19-test
args: nep-eng
metrics:
- name: BLEU
type: bleu
value: 40.7
- name: chr-F
type: chrf
value: 0.66283
- task:
name: Translation nep-fra
type: translation
args: nep-fra
dataset:
name: tico19-test
type: tico19-test
args: nep-fra
metrics:
- name: BLEU
type: bleu
value: 22.8
- name: chr-F
type: chrf
value: 0.50397
- task:
name: Translation nep-por
type: translation
args: nep-por
dataset:
name: tico19-test
type: tico19-test
args: nep-por
metrics:
- name: BLEU
type: bleu
value: 28.1
- name: chr-F
type: chrf
value: 0.55951
- task:
name: Translation nep-spa
type: translation
args: nep-spa
dataset:
name: tico19-test
type: tico19-test
args: nep-spa
metrics:
- name: BLEU
type: bleu
value: 30.3
- name: chr-F
type: chrf
value: 0.57272
- task:
name: Translation urd-eng
type: translation
args: urd-eng
dataset:
name: tico19-test
type: tico19-test
args: urd-eng
metrics:
- name: BLEU
type: bleu
value: 30.5
- name: chr-F
type: chrf
value: 0.57473
- task:
name: Translation urd-fra
type: translation
args: urd-fra
dataset:
name: tico19-test
type: tico19-test
args: urd-fra
metrics:
- name: BLEU
type: bleu
value: 19.6
- name: chr-F
type: chrf
value: 0.46725
- task:
name: Translation urd-por
type: translation
args: urd-por
dataset:
name: tico19-test
type: tico19-test
args: urd-por
metrics:
- name: BLEU
type: bleu
value: 23.5
- name: chr-F
type: chrf
value: 0.50913
- task:
name: Translation urd-spa
type: translation
args: urd-spa
dataset:
name: tico19-test
type: tico19-test
args: urd-spa
metrics:
- name: BLEU
type: bleu
value: 25.8
- name: chr-F
type: chrf
value: 0.52387
- task:
name: Translation hin-eng
type: translation
args: hin-eng
dataset:
name: newstest2014
type: wmt-2014-news
args: hin-eng
metrics:
- name: BLEU
type: bleu
value: 30.3
- name: chr-F
type: chrf
value: 0.59329
- task:
name: Translation guj-eng
type: translation
args: guj-eng
dataset:
name: newstest2019
type: wmt-2019-news
args: guj-eng
metrics:
- name: BLEU
type: bleu
value: 26.9
- name: chr-F
type: chrf
value: 0.53383
---
# opus-mt-tc-bible-big-inc-deu_eng_fra_por_spa
## Table of Contents
- [Model Details](#model-details)
- [Uses](#uses)
- [Risks, Limitations and Biases](#risks-limitations-and-biases)
- [How to Get Started With the Model](#how-to-get-started-with-the-model)
- [Training](#training)
- [Evaluation](#evaluation)
- [Citation Information](#citation-information)
- [Acknowledgements](#acknowledgements)
## Model Details
Neural machine translation model for translating from Indic languages (inc) to German, English, French, Portuguese and Spanish (deu+eng+fra+por+spa).
This model is part of the [OPUS-MT project](https://github.com/Helsinki-NLP/Opus-MT), an effort to make neural machine translation models widely available and accessible for many languages in the world. All models are originally trained using the amazing framework of [Marian NMT](https://marian-nmt.github.io/), an efficient NMT implementation written in pure C++. The models have been converted to PyTorch using the transformers library by Hugging Face. Training data is taken from [OPUS](https://opus.nlpl.eu/) and training pipelines use the procedures of [OPUS-MT-train](https://github.com/Helsinki-NLP/Opus-MT-train).
**Model Description:**
- **Developed by:** Language Technology Research Group at the University of Helsinki
- **Model Type:** Translation (transformer-big)
- **Release**: 2024-05-30
- **License:** Apache-2.0
- **Language(s):**
- Source Language(s): anp asm awa ben bho bpy div dty gbm guj hif hin hne hns kas kok lah mag mai mar nep npi ori pan pli rhg rmy rom san sin skr snd syl urd
- Target Language(s): deu eng fra por spa
- Valid Target Language Labels: >>deu<< >>eng<< >>fra<< >>por<< >>spa<< >>xxx<<
- **Original Model**: [opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-30.zip](https://object.pouta.csc.fi/Tatoeba-MT-models/inc-deu+eng+fra+por+spa/opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-30.zip)
- **Resources for more information:**
- [OPUS-MT dashboard](https://opus.nlpl.eu/dashboard/index.php?pkg=opusmt&test=all&scoreslang=all&chart=standard&model=Tatoeba-MT-models/inc-deu%2Beng%2Bfra%2Bpor%2Bspa/opusTCv20230926max50%2Bbt%2Bjhubc_transformer-big_2024-05-30)
- [OPUS-MT-train GitHub Repo](https://github.com/Helsinki-NLP/OPUS-MT-train)
- [More information about MarianNMT models in the transformers library](https://huggingface.co/docs/transformers/model_doc/marian)
- [Tatoeba Translation Challenge](https://github.com/Helsinki-NLP/Tatoeba-Challenge/)
- [HPLT bilingual data v1 (as part of the Tatoeba Translation Challenge dataset)](https://hplt-project.org/datasets/v1)
- [A massively parallel Bible corpus](https://aclanthology.org/L14-1215/)
This is a multilingual translation model with multiple target languages. A sentence initial language token is required in the form of `>>id<<` (id = valid target language ID), e.g. `>>deu<<`
## Uses
This model can be used for translation and text-to-text generation.
## Risks, Limitations and Biases
**CONTENT WARNING: Readers should be aware that the model is trained on various public data sets that may contain content that is disturbing, offensive, and can propagate historical and current stereotypes.**
Significant research has explored bias and fairness issues with language models (see, e.g., [Sheng et al. (2021)](https://aclanthology.org/2021.acl-long.330.pdf) and [Bender et al. (2021)](https://dl.acm.org/doi/pdf/10.1145/3442188.3445922)).
## How to Get Started With the Model
A short example code:
```python
from transformers import MarianMTModel, MarianTokenizer
src_text = [
    ">>deu<< Replace this with text in an accepted source language.",
    ">>spa<< This is the second sentence."
]
model_name = "pytorch-models/opus-mt-tc-bible-big-inc-deu_eng_fra_por_spa"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)
translated = model.generate(**tokenizer(src_text, return_tensors="pt", padding=True))
for t in translated:
    print(tokenizer.decode(t, skip_special_tokens=True))
```
You can also use OPUS-MT models with the transformers pipelines, for example:
```python
from transformers import pipeline
pipe = pipeline("translation", model="Helsinki-NLP/opus-mt-tc-bible-big-inc-deu_eng_fra_por_spa")
print(pipe(">>deu<< Replace this with text in an accepted source language."))
```
## Training
- **Data**: opusTCv20230926max50+bt+jhubc ([source](https://github.com/Helsinki-NLP/Tatoeba-Challenge))
- **Pre-processing**: SentencePiece (spm32k,spm32k)
- **Model Type:** transformer-big
- **Original MarianNMT Model**: [opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-30.zip](https://object.pouta.csc.fi/Tatoeba-MT-models/inc-deu+eng+fra+por+spa/opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-30.zip)
- **Training Scripts**: [GitHub Repo](https://github.com/Helsinki-NLP/OPUS-MT-train)
## Evaluation
* [Model scores at the OPUS-MT dashboard](https://opus.nlpl.eu/dashboard/index.php?pkg=opusmt&test=all&scoreslang=all&chart=standard&model=Tatoeba-MT-models/inc-deu%2Beng%2Bfra%2Bpor%2Bspa/opusTCv20230926max50%2Bbt%2Bjhubc_transformer-big_2024-05-30)
* test set translations: [opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-29.test.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/inc-deu+eng+fra+por+spa/opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-29.test.txt)
* test set scores: [opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-29.eval.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/inc-deu+eng+fra+por+spa/opusTCv20230926max50+bt+jhubc_transformer-big_2024-05-29.eval.txt)
* benchmark results: [benchmark_results.txt](benchmark_results.txt)
* benchmark output: [benchmark_translations.zip](benchmark_translations.zip)
| langpair | testset | chr-F | BLEU | #sent | #words |
|----------|---------|-------|-------|-------|--------|
| awa-eng | tatoeba-test-v2021-08-07 | 0.60390 | 40.8 | 279 | 1335 |
| ben-eng | tatoeba-test-v2021-08-07 | 0.64078 | 49.4 | 2500 | 13978 |
| hin-eng | tatoeba-test-v2021-08-07 | 0.64929 | 49.1 | 5000 | 33943 |
| mar-eng | tatoeba-test-v2021-08-07 | 0.64074 | 48.0 | 10396 | 67527 |
| urd-eng | tatoeba-test-v2021-08-07 | 0.52963 | 35.0 | 1663 | 12029 |
| ben-eng | flores101-devtest | 0.57906 | 30.4 | 1012 | 24721 |
| ben-fra | flores101-devtest | 0.50109 | 21.9 | 1012 | 28343 |
| guj-spa | flores101-devtest | 0.44065 | 15.2 | 1012 | 29199 |
| mar-deu | flores101-devtest | 0.44067 | 13.8 | 1012 | 25094 |
| mar-por | flores101-devtest | 0.46685 | 18.6 | 1012 | 26519 |
| mar-spa | flores101-devtest | 0.41662 | 14.0 | 1012 | 29199 |
| pan-eng | flores101-devtest | 0.59922 | 33.0 | 1012 | 24721 |
| pan-por | flores101-devtest | 0.49373 | 21.9 | 1012 | 26519 |
| pan-spa | flores101-devtest | 0.43910 | 15.4 | 1012 | 29199 |
| asm-eng | flores200-devtest | 0.48584 | 21.9 | 1012 | 24721 |
| awa-deu | flores200-devtest | 0.47173 | 16.5 | 1012 | 25094 |
| awa-eng | flores200-devtest | 0.50582 | 24.5 | 1012 | 24721 |
| awa-fra | flores200-devtest | 0.49682 | 21.4 | 1012 | 28343 |
| awa-por | flores200-devtest | 0.49663 | 21.5 | 1012 | 26519 |
| awa-spa | flores200-devtest | 0.43740 | 15.1 | 1012 | 29199 |
| ben-deu | flores200-devtest | 0.47330 | 16.6 | 1012 | 25094 |
| ben-eng | flores200-devtest | 0.58077 | 30.5 | 1012 | 24721 |
| ben-fra | flores200-devtest | 0.50884 | 22.6 | 1012 | 28343 |
| ben-por | flores200-devtest | 0.50054 | 21.4 | 1012 | 26519 |
| ben-spa | flores200-devtest | 0.44159 | 15.2 | 1012 | 29199 |
| bho-deu | flores200-devtest | 0.42660 | 12.6 | 1012 | 25094 |
| bho-eng | flores200-devtest | 0.50609 | 22.7 | 1012 | 24721 |
| bho-fra | flores200-devtest | 0.44889 | 16.8 | 1012 | 28343 |
| bho-por | flores200-devtest | 0.44582 | 16.9 | 1012 | 26519 |
| bho-spa | flores200-devtest | 0.40581 | 13.1 | 1012 | 29199 |
| guj-deu | flores200-devtest | 0.46665 | 16.8 | 1012 | 25094 |
| guj-eng | flores200-devtest | 0.61383 | 34.3 | 1012 | 24721 |
| guj-fra | flores200-devtest | 0.50410 | 22.3 | 1012 | 28343 |
| guj-por | flores200-devtest | 0.49257 | 21.3 | 1012 | 26519 |
| guj-spa | flores200-devtest | 0.44565 | 15.6 | 1012 | 29199 |
| hin-deu | flores200-devtest | 0.50226 | 20.4 | 1012 | 25094 |
| hin-eng | flores200-devtest | 0.63336 | 37.3 | 1012 | 24721 |
| hin-fra | flores200-devtest | 0.53701 | 25.9 | 1012 | 28343 |
| hin-por | flores200-devtest | 0.53448 | 25.5 | 1012 | 26519 |
| hin-spa | flores200-devtest | 0.46171 | 17.2 | 1012 | 29199 |
| hne-deu | flores200-devtest | 0.49698 | 19.0 | 1012 | 25094 |
| hne-eng | flores200-devtest | 0.63936 | 38.5 | 1012 | 24721 |
| hne-fra | flores200-devtest | 0.52835 | 25.3 | 1012 | 28343 |
| hne-por | flores200-devtest | 0.52788 | 25.0 | 1012 | 26519 |
| hne-spa | flores200-devtest | 0.45443 | 16.7 | 1012 | 29199 |
| mag-deu | flores200-devtest | 0.50359 | 19.7 | 1012 | 25094 |
| mag-eng | flores200-devtest | 0.63906 | 38.0 | 1012 | 24721 |
| mag-fra | flores200-devtest | 0.53616 | 25.8 | 1012 | 28343 |
| mag-por | flores200-devtest | 0.53537 | 25.9 | 1012 | 26519 |
| mag-spa | flores200-devtest | 0.45822 | 16.9 | 1012 | 29199 |
| mai-deu | flores200-devtest | 0.46791 | 16.2 | 1012 | 25094 |
| mai-eng | flores200-devtest | 0.57461 | 30.4 | 1012 | 24721 |
| mai-fra | flores200-devtest | 0.50585 | 22.1 | 1012 | 28343 |
| mai-por | flores200-devtest | 0.50490 | 22.0 | 1012 | 26519 |
| mai-spa | flores200-devtest | 0.44366 | 15.3 | 1012 | 29199 |
| mar-deu | flores200-devtest | 0.44725 | 14.5 | 1012 | 25094 |
| mar-eng | flores200-devtest | 0.58500 | 31.4 | 1012 | 24721 |
| mar-fra | flores200-devtest | 0.47027 | 19.5 | 1012 | 28343 |
| mar-por | flores200-devtest | 0.47216 | 19.3 | 1012 | 26519 |
| mar-spa | flores200-devtest | 0.42178 | 14.2 | 1012 | 29199 |
| npi-deu | flores200-devtest | 0.46631 | 16.4 | 1012 | 25094 |
| npi-eng | flores200-devtest | 0.59776 | 32.3 | 1012 | 24721 |
| npi-fra | flores200-devtest | 0.50548 | 22.5 | 1012 | 28343 |
| npi-por | flores200-devtest | 0.50202 | 21.7 | 1012 | 26519 |
| npi-spa | flores200-devtest | 0.43804 | 15.3 | 1012 | 29199 |
| pan-deu | flores200-devtest | 0.48421 | 18.7 | 1012 | 25094 |
| pan-eng | flores200-devtest | 0.60676 | 33.8 | 1012 | 24721 |
| pan-fra | flores200-devtest | 0.51368 | 23.5 | 1012 | 28343 |
| pan-por | flores200-devtest | 0.50586 | 22.7 | 1012 | 26519 |
| pan-spa | flores200-devtest | 0.44653 | 16.5 | 1012 | 29199 |
| sin-deu | flores200-devtest | 0.44676 | 14.2 | 1012 | 25094 |
| sin-eng | flores200-devtest | 0.54777 | 26.8 | 1012 | 24721 |
| sin-fra | flores200-devtest | 0.47283 | 19.0 | 1012 | 28343 |
| sin-por | flores200-devtest | 0.46935 | 18.4 | 1012 | 26519 |
| sin-spa | flores200-devtest | 0.42143 | 13.7 | 1012 | 29199 |
| urd-deu | flores200-devtest | 0.46542 | 17.1 | 1012 | 25094 |
| urd-eng | flores200-devtest | 0.56935 | 29.3 | 1012 | 24721 |
| urd-fra | flores200-devtest | 0.50276 | 22.3 | 1012 | 28343 |
| urd-por | flores200-devtest | 0.48010 | 20.3 | 1012 | 26519 |
| urd-spa | flores200-devtest | 0.43032 | 14.7 | 1012 | 29199 |
| hin-eng | newstest2014 | 0.59329 | 30.3 | 2507 | 55571 |
| guj-eng | newstest2019 | 0.53383 | 26.9 | 1016 | 17757 |
| ben-deu | ntrex128 | 0.45180 | 14.6 | 1997 | 48761 |
| ben-eng | ntrex128 | 0.57247 | 29.5 | 1997 | 47673 |
| ben-fra | ntrex128 | 0.46475 | 18.0 | 1997 | 53481 |
| ben-por | ntrex128 | 0.45486 | 16.8 | 1997 | 51631 |
| ben-spa | ntrex128 | 0.48738 | 21.1 | 1997 | 54107 |
| guj-deu | ntrex128 | 0.43539 | 13.9 | 1997 | 48761 |
| guj-eng | ntrex128 | 0.58894 | 31.6 | 1997 | 47673 |
| guj-fra | ntrex128 | 0.45075 | 16.9 | 1997 | 53481 |
| guj-por | ntrex128 | 0.43567 | 15.2 | 1997 | 51631 |
| guj-spa | ntrex128 | 0.47525 | 20.2 | 1997 | 54107 |
| hin-deu | ntrex128 | 0.46336 | 15.0 | 1997 | 48761 |
| hin-eng | ntrex128 | 0.59842 | 31.5 | 1997 | 47673 |
| hin-fra | ntrex128 | 0.48208 | 19.2 | 1997 | 53481 |
| hin-por | ntrex128 | 0.46509 | 17.6 | 1997 | 51631 |
| hin-spa | ntrex128 | 0.49436 | 21.8 | 1997 | 54107 |
| mar-deu | ntrex128 | 0.43119 | 12.8 | 1997 | 48761 |
| mar-eng | ntrex128 | 0.55151 | 27.3 | 1997 | 47673 |
| mar-fra | ntrex128 | 0.43957 | 16.2 | 1997 | 53481 |
| mar-por | ntrex128 | 0.43555 | 15.4 | 1997 | 51631 |
| mar-spa | ntrex128 | 0.46271 | 19.1 | 1997 | 54107 |
| nep-deu | ntrex128 | 0.42940 | 13.0 | 1997 | 48761 |
| nep-eng | ntrex128 | 0.56277 | 29.1 | 1997 | 47673 |
| nep-fra | ntrex128 | 0.44663 | 16.5 | 1997 | 53481 |
| nep-por | ntrex128 | 0.43686 | 15.4 | 1997 | 51631 |
| nep-spa | ntrex128 | 0.46553 | 19.3 | 1997 | 54107 |
| pan-deu | ntrex128 | 0.44036 | 14.1 | 1997 | 48761 |
| pan-eng | ntrex128 | 0.58427 | 31.6 | 1997 | 47673 |
| pan-fra | ntrex128 | 0.45593 | 17.3 | 1997 | 53481 |
| pan-por | ntrex128 | 0.44264 | 15.9 | 1997 | 51631 |
| pan-spa | ntrex128 | 0.47199 | 20.0 | 1997 | 54107 |
| sin-deu | ntrex128 | 0.42280 | 12.4 | 1997 | 48761 |
| sin-eng | ntrex128 | 0.52576 | 24.6 | 1997 | 47673 |
| sin-fra | ntrex128 | 0.43594 | 15.6 | 1997 | 53481 |
| sin-por | ntrex128 | 0.42751 | 14.4 | 1997 | 51631 |
| sin-spa | ntrex128 | 0.45890 | 18.3 | 1997 | 54107 |
| urd-deu | ntrex128 | 0.45737 | 15.6 | 1997 | 48761 |
| urd-eng | ntrex128 | 0.56781 | 28.6 | 1997 | 47673 |
| urd-fra | ntrex128 | 0.47298 | 18.9 | 1997 | 53481 |
| urd-por | ntrex128 | 0.45273 | 16.2 | 1997 | 51631 |
| urd-spa | ntrex128 | 0.48644 | 21.0 | 1997 | 54107 |
| ben-eng | tico19-test | 0.64568 | 38.2 | 2100 | 56824 |
| ben-fra | tico19-test | 0.49799 | 22.0 | 2100 | 64661 |
| ben-por | tico19-test | 0.55115 | 27.2 | 2100 | 62729 |
| ben-spa | tico19-test | 0.56847 | 29.9 | 2100 | 66563 |
| hin-eng | tico19-test | 0.70694 | 46.6 | 2100 | 56323 |
| hin-fra | tico19-test | 0.53932 | 26.7 | 2100 | 64661 |
| hin-por | tico19-test | 0.60581 | 33.4 | 2100 | 62729 |
| hin-spa | tico19-test | 0.61585 | 35.7 | 2100 | 66563 |
| mar-eng | tico19-test | 0.59329 | 31.8 | 2100 | 56315 |
| mar-fra | tico19-test | 0.46574 | 19.3 | 2100 | 64661 |
| mar-por | tico19-test | 0.51463 | 23.6 | 2100 | 62729 |
| mar-spa | tico19-test | 0.52551 | 25.7 | 2100 | 66563 |
| nep-eng | tico19-test | 0.66283 | 40.7 | 2100 | 56824 |
| nep-fra | tico19-test | 0.50397 | 22.8 | 2100 | 64661 |
| nep-por | tico19-test | 0.55951 | 28.1 | 2100 | 62729 |
| nep-spa | tico19-test | 0.57272 | 30.3 | 2100 | 66563 |
| urd-eng | tico19-test | 0.57473 | 30.5 | 2100 | 56315 |
| urd-fra | tico19-test | 0.46725 | 19.6 | 2100 | 64661 |
| urd-por | tico19-test | 0.50913 | 23.5 | 2100 | 62729 |
| urd-spa | tico19-test | 0.52387 | 25.8 | 2100 | 66563 |
## Citation Information
* Publications: [Democratizing neural machine translation with OPUS-MT](https://doi.org/10.1007/s10579-023-09704-w) and [OPUS-MT – Building open translation services for the World](https://aclanthology.org/2020.eamt-1.61/) and [The Tatoeba Translation Challenge – Realistic Data Sets for Low Resource and Multilingual MT](https://aclanthology.org/2020.wmt-1.139/) (Please, cite if you use this model.)
```bibtex
@article{tiedemann2023democratizing,
title={Democratizing neural machine translation with {OPUS-MT}},
author={Tiedemann, J{\"o}rg and Aulamo, Mikko and Bakshandaeva, Daria and Boggia, Michele and Gr{\"o}nroos, Stig-Arne and Nieminen, Tommi and Raganato, Alessandro and Scherrer, Yves and Vazquez, Raul and Virpioja, Sami},
journal={Language Resources and Evaluation},
number={58},
pages={713--755},
year={2023},
publisher={Springer Nature},
issn={1574-0218},
doi={10.1007/s10579-023-09704-w}
}
@inproceedings{tiedemann-thottingal-2020-opus,
title = "{OPUS}-{MT} {--} Building open translation services for the World",
author = {Tiedemann, J{\"o}rg and Thottingal, Santhosh},
booktitle = "Proceedings of the 22nd Annual Conference of the European Association for Machine Translation",
month = nov,
year = "2020",
address = "Lisboa, Portugal",
publisher = "European Association for Machine Translation",
url = "https://aclanthology.org/2020.eamt-1.61",
pages = "479--480",
}
@inproceedings{tiedemann-2020-tatoeba,
title = "The Tatoeba Translation Challenge {--} Realistic Data Sets for Low Resource and Multilingual {MT}",
author = {Tiedemann, J{\"o}rg},
booktitle = "Proceedings of the Fifth Conference on Machine Translation",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2020.wmt-1.139",
pages = "1174--1182",
}
```
## Acknowledgements
The work is supported by the [HPLT project](https://hplt-project.org/), funded by the European Union’s Horizon Europe research and innovation programme under grant agreement No 101070350. We are also grateful for the generous computational resources and IT infrastructure provided by [CSC -- IT Center for Science](https://www.csc.fi/), Finland, and the [EuroHPC supercomputer LUMI](https://www.lumi-supercomputer.eu/).
## Model conversion info
* transformers version: 4.45.1
* OPUS-MT git hash: 0882077
* port time: Tue Oct 8 11:39:25 EEST 2024
* port machine: LM0-400-22516.local