Dataset: mteb
Commit 34dfeca (unverified) · parent: 4f09101
Roman Solomatin committed: add russian models results

This view is limited to 50 files because it contains too many changes. See raw diff.

Files changed (50)
  1. paths.json +0 -0
  2. results.py +10 -1
  3. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/BUCC.v2.json +23 -0
  4. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/BelebeleRetrieval.json +455 -0
  5. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/BibleNLPBitextMining.json +35 -0
  6. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/CEDRClassification.json +73 -0
  7. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/CyrillicTurkicLangClassification.json +81 -0
  8. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/FloresBitextMining.json +0 -0
  9. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/GeoreviewClassification.json +73 -0
  10. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/GeoreviewClusteringP2P.json +34 -0
  11. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/HeadlineClassification.json +73 -0
  12. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/InappropriatenessClassification.json +95 -0
  13. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/KinopoiskClassification.json +73 -0
  14. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/LanguageClassification.json +92 -0
  15. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/MLSUMClusteringP2P.json +55 -0
  16. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/MLSUMClusteringP2P.v2.json +34 -0
  17. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/MLSUMClusteringS2S.json +55 -0
  18. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/MLSUMClusteringS2S.v2.json +59 -0
  19. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/MassiveIntentClassification.json +137 -0
  20. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/MassiveScenarioClassification.json +137 -0
  21. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/MultiLongDocRetrieval.json +307 -0
  22. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/MultilingualSentimentClassification.json +95 -0
  23. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/NTREXBitextMining.json +899 -0
  24. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/OpusparcusPC.json +105 -0
  25. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/PublicHealthQA.json +158 -0
  26. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/RUParaPhraserSTS.json +32 -0
  27. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/RiaNewsRetrieval.json +158 -0
  28. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/RuBQReranking.json +26 -0
  29. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/RuBQRetrieval.json +158 -0
  30. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/RuReviewsClassification.json +73 -0
  31. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/RuSTSBenchmarkSTS.json +32 -0
  32. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/RuSciBenchGRNTIClassification.json +73 -0
  33. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/RuSciBenchGRNTIClusteringP2P.json +34 -0
  34. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/RuSciBenchOECDClassification.json +73 -0
  35. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/RuSciBenchOECDClusteringP2P.json +34 -0
  36. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/SIB200Classification.json +201 -0
  37. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/SIB200ClusteringS2S.json +34 -0
  38. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/STS22.json +32 -0
  39. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/STS22.v2.json +32 -0
  40. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/STSBenchmarkMultilingualSTS.json +55 -0
  41. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/SensitiveTopicsClassification.json +73 -0
  42. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/TERRa.json +68 -0
  43. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/Tatoeba.json +23 -0
  44. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/XNLI.json +105 -0
  45. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/XNLIV2.json +57 -0
  46. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/XQuADRetrieval.json +158 -0
  47. results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/model_meta.json +1 -0
  48. results/LaBSE/e34fab64a3011d2176c99545a93d5cbddc9a91b7/GPUSpeedTask.json +53 -0
  49. results/LaBSE/e34fab64a3011d2176c99545a93d5cbddc9a91b7/LanguageClassification.json +92 -0
  50. results/LaBSE/e34fab64a3011d2176c99545a93d5cbddc9a91b7/MLSUMClusteringP2P.json +55 -0
paths.json CHANGED
The diff for this file is too large to render. See raw diff
 
results.py CHANGED
@@ -33,7 +33,7 @@ TRAIN_SPLIT = ["DanishPoliticalCommentsClassification"]
  # Use "validation" split instead
  VALIDATION_SPLIT = ["AFQMC", "Cmnli", "IFlyTek", "LEMBSummScreenFDRetrieval", "MSMARCO", "MSMARCO-PL", "MultilingualSentiment", "Ocnli", "TNews"]
  # Use "dev" split instead
- DEV_SPLIT = ["CmedqaRetrieval", "CovidRetrieval", "DuRetrieval", "EcomRetrieval", "MedicalRetrieval", "MMarcoReranking", "MMarcoRetrieval", "MSMARCO", "MSMARCO-PL", "T2Reranking", "T2Retrieval", "VideoRetrieval"]
+ DEV_SPLIT = ["CmedqaRetrieval", "CovidRetrieval", "DuRetrieval", "EcomRetrieval", "MedicalRetrieval", "MMarcoReranking", "MMarcoRetrieval", "MSMARCO", "MSMARCO-PL", "T2Reranking", "T2Retrieval", "VideoRetrieval", "TERRa",]
  # Use "test.full" split
  TESTFULL_SPLIT = ["OpusparcusPC"]
  # Use "standard" split
@@ -235,6 +235,15 @@ MODELS = [
  "voyage-multilingual-2",
  "xlm-roberta-base",
  "xlm-roberta-large",
+ "deberta-v1-base",
+ "USER-bge-m3",
+ "USER-base",
+ "rubert-tiny-turbo",
+ "LaBSE-ru-turbo",
+ "distilrubert-small-cased-conversational",
+ "rubert-base-cased",
+ "rubert-base-cased-sentence",
+ "LaBSE-en-ru",
  ]
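Note: the first hunk routes the newly added TERRa results to the "dev" split, mirroring the existing "# Use 'dev' split instead" comment. A minimal, hypothetical sketch of how such override lists can be consulted when deciding which split's scores to read (the helper split_for is illustrative and not part of results.py; the lists are excerpts of the real ones):

    # Illustrative only: choose the evaluation split for a task based on the
    # override lists maintained in results.py. Default is the "test" split.
    DEV_SPLIT = ["MSMARCO", "TERRa"]                       # excerpt
    VALIDATION_SPLIT = ["AFQMC", "MultilingualSentiment"]  # excerpt

    def split_for(task_name: str) -> str:
        if task_name in DEV_SPLIT:
            return "dev"
        if task_name in VALIDATION_SPLIT:
            return "validation"
        return "test"

    print(split_for("TERRa"))  # -> "dev" after this commit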
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/BUCC.v2.json ADDED
@@ -0,0 +1,23 @@
+ {
+ "dataset_revision": "1739dc11ffe9b7bfccd7f3d585aeb4c544fc6677",
+ "evaluation_time": 16.575636386871338,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.12.85",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.9797713889851056,
+ "f1": 0.9734441750375247,
+ "hf_subset": "ru-en",
+ "languages": [
+ "rus-Cyrl",
+ "eng-Latn"
+ ],
+ "main_score": 0.9734441750375247,
+ "precision": 0.9702921140745872,
+ "recall": 0.9797713889851056
+ }
+ ]
+ },
+ "task_name": "BUCC.v2"
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/BelebeleRetrieval.json ADDED
@@ -0,0 +1,455 @@
1
+ {
2
+ "dataset_revision": "75b399394a9803252cfec289d103de462763db7c",
3
+ "evaluation_time": 7.6180665493011475,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.85",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "rus_Cyrl-rus_Cyrl",
10
+ "languages": [
11
+ "rus-Cyrl",
12
+ "rus-Cyrl"
13
+ ],
14
+ "main_score": 0.92668,
15
+ "map_at_1": 0.87667,
16
+ "map_at_10": 0.91126,
17
+ "map_at_100": 0.91219,
18
+ "map_at_1000": 0.91222,
19
+ "map_at_20": 0.91195,
20
+ "map_at_3": 0.90519,
21
+ "map_at_5": 0.90907,
22
+ "mrr_at_1": 0.8766666666666667,
23
+ "mrr_at_10": 0.9112597001763673,
24
+ "mrr_at_100": 0.9121887254608686,
25
+ "mrr_at_1000": 0.9122211146445585,
26
+ "mrr_at_20": 0.9119468590357397,
27
+ "mrr_at_3": 0.9051851851851855,
28
+ "mrr_at_5": 0.9090740740740744,
29
+ "nauc_map_at_1000_diff1": 0.9002142263997838,
30
+ "nauc_map_at_1000_max": 0.8631290098123422,
31
+ "nauc_map_at_1000_std": 0.09708820492618626,
32
+ "nauc_map_at_100_diff1": 0.9002292437921736,
33
+ "nauc_map_at_100_max": 0.8631803869573752,
34
+ "nauc_map_at_100_std": 0.09739350050217861,
35
+ "nauc_map_at_10_diff1": 0.8998558982824476,
36
+ "nauc_map_at_10_max": 0.862553276514086,
37
+ "nauc_map_at_10_std": 0.09735425834696392,
38
+ "nauc_map_at_1_diff1": 0.908380124373604,
39
+ "nauc_map_at_1_max": 0.857703487118448,
40
+ "nauc_map_at_1_std": 0.07901432625214454,
41
+ "nauc_map_at_20_diff1": 0.9003151548597751,
42
+ "nauc_map_at_20_max": 0.8633665105706486,
43
+ "nauc_map_at_20_std": 0.09675693401622591,
44
+ "nauc_map_at_3_diff1": 0.89894482625683,
45
+ "nauc_map_at_3_max": 0.8640581096209302,
46
+ "nauc_map_at_3_std": 0.09018371477502721,
47
+ "nauc_map_at_5_diff1": 0.8994968484124081,
48
+ "nauc_map_at_5_max": 0.8620191628903708,
49
+ "nauc_map_at_5_std": 0.09968812918124696,
50
+ "nauc_mrr_at_1000_diff1": 0.9002142263997838,
51
+ "nauc_mrr_at_1000_max": 0.8631290098123422,
52
+ "nauc_mrr_at_1000_std": 0.09708820492618626,
53
+ "nauc_mrr_at_100_diff1": 0.9002292437921736,
54
+ "nauc_mrr_at_100_max": 0.8631803869573752,
55
+ "nauc_mrr_at_100_std": 0.09739350050217861,
56
+ "nauc_mrr_at_10_diff1": 0.8998558982824476,
57
+ "nauc_mrr_at_10_max": 0.862553276514086,
58
+ "nauc_mrr_at_10_std": 0.09735425834696392,
59
+ "nauc_mrr_at_1_diff1": 0.908380124373604,
60
+ "nauc_mrr_at_1_max": 0.857703487118448,
61
+ "nauc_mrr_at_1_std": 0.07901432625214454,
62
+ "nauc_mrr_at_20_diff1": 0.9003151548597751,
63
+ "nauc_mrr_at_20_max": 0.8633665105706486,
64
+ "nauc_mrr_at_20_std": 0.09675693401622591,
65
+ "nauc_mrr_at_3_diff1": 0.89894482625683,
66
+ "nauc_mrr_at_3_max": 0.8640581096209302,
67
+ "nauc_mrr_at_3_std": 0.09018371477502721,
68
+ "nauc_mrr_at_5_diff1": 0.8994968484124081,
69
+ "nauc_mrr_at_5_max": 0.8620191628903708,
70
+ "nauc_mrr_at_5_std": 0.09968812918124696,
71
+ "nauc_ndcg_at_1000_diff1": 0.8996036058720834,
72
+ "nauc_ndcg_at_1000_max": 0.8646088241152043,
73
+ "nauc_ndcg_at_1000_std": 0.10371176161894702,
74
+ "nauc_ndcg_at_100_diff1": 0.8998540437959677,
75
+ "nauc_ndcg_at_100_max": 0.8660742787786956,
76
+ "nauc_ndcg_at_100_std": 0.11225945522831975,
77
+ "nauc_ndcg_at_10_diff1": 0.8986059363366913,
78
+ "nauc_ndcg_at_10_max": 0.8641899783525004,
79
+ "nauc_ndcg_at_10_std": 0.10798871552914628,
80
+ "nauc_ndcg_at_1_diff1": 0.908380124373604,
81
+ "nauc_ndcg_at_1_max": 0.857703487118448,
82
+ "nauc_ndcg_at_1_std": 0.07901432625214454,
83
+ "nauc_ndcg_at_20_diff1": 0.9005327043747465,
84
+ "nauc_ndcg_at_20_max": 0.8676298315822463,
85
+ "nauc_ndcg_at_20_std": 0.10688330516396452,
86
+ "nauc_ndcg_at_3_diff1": 0.8959498741410133,
87
+ "nauc_ndcg_at_3_max": 0.8662698293671856,
88
+ "nauc_ndcg_at_3_std": 0.09512157712993898,
89
+ "nauc_ndcg_at_5_diff1": 0.8970537982897169,
90
+ "nauc_ndcg_at_5_max": 0.8621090589519832,
91
+ "nauc_ndcg_at_5_std": 0.11466170984156673,
92
+ "nauc_precision_at_1000_diff1": NaN,
93
+ "nauc_precision_at_1000_max": NaN,
94
+ "nauc_precision_at_1000_std": NaN,
95
+ "nauc_precision_at_100_diff1": 0.9183006535947483,
96
+ "nauc_precision_at_100_max": 1.0,
97
+ "nauc_precision_at_100_std": 0.888888888888893,
98
+ "nauc_precision_at_10_diff1": 0.8925222262818151,
99
+ "nauc_precision_at_10_max": 0.8816628100515531,
100
+ "nauc_precision_at_10_std": 0.20975926602525344,
101
+ "nauc_precision_at_1_diff1": 0.908380124373604,
102
+ "nauc_precision_at_1_max": 0.857703487118448,
103
+ "nauc_precision_at_1_std": 0.07901432625214454,
104
+ "nauc_precision_at_20_diff1": 0.9218020541549966,
105
+ "nauc_precision_at_20_max": 0.950980392156856,
106
+ "nauc_precision_at_20_std": 0.26653994931304675,
107
+ "nauc_precision_at_3_diff1": 0.883070512155482,
108
+ "nauc_precision_at_3_max": 0.8759034477988762,
109
+ "nauc_precision_at_3_std": 0.11723207801639118,
110
+ "nauc_precision_at_5_diff1": 0.8835323603125471,
111
+ "nauc_precision_at_5_max": 0.8608776844070952,
112
+ "nauc_precision_at_5_std": 0.2109317411174992,
113
+ "nauc_recall_at_1000_diff1": NaN,
114
+ "nauc_recall_at_1000_max": NaN,
115
+ "nauc_recall_at_1000_std": NaN,
116
+ "nauc_recall_at_100_diff1": 0.9183006535947761,
117
+ "nauc_recall_at_100_max": 1.0,
118
+ "nauc_recall_at_100_std": 0.8888888888889124,
119
+ "nauc_recall_at_10_diff1": 0.8925222262818165,
120
+ "nauc_recall_at_10_max": 0.8816628100515546,
121
+ "nauc_recall_at_10_std": 0.2097592660252506,
122
+ "nauc_recall_at_1_diff1": 0.908380124373604,
123
+ "nauc_recall_at_1_max": 0.857703487118448,
124
+ "nauc_recall_at_1_std": 0.07901432625214454,
125
+ "nauc_recall_at_20_diff1": 0.9218020541549954,
126
+ "nauc_recall_at_20_max": 0.9509803921568608,
127
+ "nauc_recall_at_20_std": 0.2665399493130565,
128
+ "nauc_recall_at_3_diff1": 0.8830705121554795,
129
+ "nauc_recall_at_3_max": 0.8759034477988749,
130
+ "nauc_recall_at_3_std": 0.11723207801639368,
131
+ "nauc_recall_at_5_diff1": 0.8835323603125492,
132
+ "nauc_recall_at_5_max": 0.8608776844070981,
133
+ "nauc_recall_at_5_std": 0.21093174111750107,
134
+ "ndcg_at_1": 0.87667,
135
+ "ndcg_at_10": 0.92668,
136
+ "ndcg_at_100": 0.93103,
137
+ "ndcg_at_1000": 0.93177,
138
+ "ndcg_at_20": 0.92919,
139
+ "ndcg_at_3": 0.91415,
140
+ "ndcg_at_5": 0.92132,
141
+ "precision_at_1": 0.87667,
142
+ "precision_at_10": 0.09744,
143
+ "precision_at_100": 0.00994,
144
+ "precision_at_1000": 0.001,
145
+ "precision_at_20": 0.04922,
146
+ "precision_at_3": 0.31333,
147
+ "precision_at_5": 0.19156,
148
+ "recall_at_1": 0.87667,
149
+ "recall_at_10": 0.97444,
150
+ "recall_at_100": 0.99444,
151
+ "recall_at_1000": 1.0,
152
+ "recall_at_20": 0.98444,
153
+ "recall_at_3": 0.94,
154
+ "recall_at_5": 0.95778
155
+ },
156
+ {
157
+ "hf_subset": "rus_Cyrl-eng_Latn",
158
+ "languages": [
159
+ "rus-Cyrl",
160
+ "eng-Latn"
161
+ ],
162
+ "main_score": 0.83786,
163
+ "map_at_1": 0.75222,
164
+ "map_at_10": 0.81025,
165
+ "map_at_100": 0.8131,
166
+ "map_at_1000": 0.81319,
167
+ "map_at_20": 0.81216,
168
+ "map_at_3": 0.79852,
169
+ "map_at_5": 0.80369,
170
+ "mrr_at_1": 0.7522222222222222,
171
+ "mrr_at_10": 0.8102513227513235,
172
+ "mrr_at_100": 0.8131029676200705,
173
+ "mrr_at_1000": 0.8131893151925148,
174
+ "mrr_at_20": 0.8121639098202571,
175
+ "mrr_at_3": 0.7985185185185187,
176
+ "mrr_at_5": 0.8036851851851856,
177
+ "nauc_map_at_1000_diff1": 0.8362342401901434,
178
+ "nauc_map_at_1000_max": 0.7709393068586414,
179
+ "nauc_map_at_1000_std": 0.18014605824658386,
180
+ "nauc_map_at_100_diff1": 0.8362002187534311,
181
+ "nauc_map_at_100_max": 0.7708947397659732,
182
+ "nauc_map_at_100_std": 0.18031279607464032,
183
+ "nauc_map_at_10_diff1": 0.8340666080106514,
184
+ "nauc_map_at_10_max": 0.7720288344632558,
185
+ "nauc_map_at_10_std": 0.18240596039508405,
186
+ "nauc_map_at_1_diff1": 0.8620927143630194,
187
+ "nauc_map_at_1_max": 0.7618094972603605,
188
+ "nauc_map_at_1_std": 0.15633630918218444,
189
+ "nauc_map_at_20_diff1": 0.8353645660468875,
190
+ "nauc_map_at_20_max": 0.7709662465283382,
191
+ "nauc_map_at_20_std": 0.18102978255715088,
192
+ "nauc_map_at_3_diff1": 0.8374243653287432,
193
+ "nauc_map_at_3_max": 0.7730597106094721,
194
+ "nauc_map_at_3_std": 0.1764310498726604,
195
+ "nauc_map_at_5_diff1": 0.8345225398172936,
196
+ "nauc_map_at_5_max": 0.7741898760702479,
197
+ "nauc_map_at_5_std": 0.17539681047164343,
198
+ "nauc_mrr_at_1000_diff1": 0.8362342401901434,
199
+ "nauc_mrr_at_1000_max": 0.7709393068586414,
200
+ "nauc_mrr_at_1000_std": 0.18014605824658386,
201
+ "nauc_mrr_at_100_diff1": 0.8362002187534311,
202
+ "nauc_mrr_at_100_max": 0.7708947397659732,
203
+ "nauc_mrr_at_100_std": 0.18031279607464032,
204
+ "nauc_mrr_at_10_diff1": 0.8340666080106514,
205
+ "nauc_mrr_at_10_max": 0.7720288344632558,
206
+ "nauc_mrr_at_10_std": 0.18240596039508405,
207
+ "nauc_mrr_at_1_diff1": 0.8620927143630194,
208
+ "nauc_mrr_at_1_max": 0.7618094972603605,
209
+ "nauc_mrr_at_1_std": 0.15633630918218444,
210
+ "nauc_mrr_at_20_diff1": 0.8353645660468875,
211
+ "nauc_mrr_at_20_max": 0.7709662465283382,
212
+ "nauc_mrr_at_20_std": 0.18102978255715088,
213
+ "nauc_mrr_at_3_diff1": 0.8374243653287432,
214
+ "nauc_mrr_at_3_max": 0.7730597106094721,
215
+ "nauc_mrr_at_3_std": 0.1764310498726604,
216
+ "nauc_mrr_at_5_diff1": 0.8345225398172936,
217
+ "nauc_mrr_at_5_max": 0.7741898760702479,
218
+ "nauc_mrr_at_5_std": 0.17539681047164343,
219
+ "nauc_ndcg_at_1000_diff1": 0.8320402729267309,
220
+ "nauc_ndcg_at_1000_max": 0.7714003996193017,
221
+ "nauc_ndcg_at_1000_std": 0.18837463458653286,
222
+ "nauc_ndcg_at_100_diff1": 0.8313581690761995,
223
+ "nauc_ndcg_at_100_max": 0.7703213765030679,
224
+ "nauc_ndcg_at_100_std": 0.1942487822804117,
225
+ "nauc_ndcg_at_10_diff1": 0.82103246781316,
226
+ "nauc_ndcg_at_10_max": 0.7751299570980865,
227
+ "nauc_ndcg_at_10_std": 0.20301666118828246,
228
+ "nauc_ndcg_at_1_diff1": 0.8620927143630194,
229
+ "nauc_ndcg_at_1_max": 0.7618094972603605,
230
+ "nauc_ndcg_at_1_std": 0.15633630918218444,
231
+ "nauc_ndcg_at_20_diff1": 0.8262288417063813,
232
+ "nauc_ndcg_at_20_max": 0.7706418417195513,
233
+ "nauc_ndcg_at_20_std": 0.19852693597182078,
234
+ "nauc_ndcg_at_3_diff1": 0.828576757206663,
235
+ "nauc_ndcg_at_3_max": 0.7771040208173721,
236
+ "nauc_ndcg_at_3_std": 0.18600509814914096,
237
+ "nauc_ndcg_at_5_diff1": 0.8222868882565071,
238
+ "nauc_ndcg_at_5_max": 0.7794095686072242,
239
+ "nauc_ndcg_at_5_std": 0.1840169494750817,
240
+ "nauc_precision_at_1000_diff1": NaN,
241
+ "nauc_precision_at_1000_max": NaN,
242
+ "nauc_precision_at_1000_std": NaN,
243
+ "nauc_precision_at_100_diff1": 0.7911164465786305,
244
+ "nauc_precision_at_100_max": 0.7047485660931255,
245
+ "nauc_precision_at_100_std": 0.6024076297185628,
246
+ "nauc_precision_at_10_diff1": 0.7343878727961759,
247
+ "nauc_precision_at_10_max": 0.7948522546273378,
248
+ "nauc_precision_at_10_std": 0.3537032460042791,
249
+ "nauc_precision_at_1_diff1": 0.8620927143630194,
250
+ "nauc_precision_at_1_max": 0.7618094972603605,
251
+ "nauc_precision_at_1_std": 0.15633630918218444,
252
+ "nauc_precision_at_20_diff1": 0.7467048835037896,
253
+ "nauc_precision_at_20_max": 0.7530454042081964,
254
+ "nauc_precision_at_20_std": 0.39139686882504704,
255
+ "nauc_precision_at_3_diff1": 0.795023572551076,
256
+ "nauc_precision_at_3_max": 0.7924455586320455,
257
+ "nauc_precision_at_3_std": 0.22336301728653848,
258
+ "nauc_precision_at_5_diff1": 0.7680549517433265,
259
+ "nauc_precision_at_5_max": 0.8022824102251969,
260
+ "nauc_precision_at_5_std": 0.22140683418833176,
261
+ "nauc_recall_at_1000_diff1": NaN,
262
+ "nauc_recall_at_1000_max": NaN,
263
+ "nauc_recall_at_1000_std": NaN,
264
+ "nauc_recall_at_100_diff1": 0.7911164465786332,
265
+ "nauc_recall_at_100_max": 0.7047485660931027,
266
+ "nauc_recall_at_100_std": 0.6024076297185507,
267
+ "nauc_recall_at_10_diff1": 0.7343878727961768,
268
+ "nauc_recall_at_10_max": 0.7948522546273411,
269
+ "nauc_recall_at_10_std": 0.3537032460042859,
270
+ "nauc_recall_at_1_diff1": 0.8620927143630194,
271
+ "nauc_recall_at_1_max": 0.7618094972603605,
272
+ "nauc_recall_at_1_std": 0.15633630918218444,
273
+ "nauc_recall_at_20_diff1": 0.746704883503787,
274
+ "nauc_recall_at_20_max": 0.7530454042081945,
275
+ "nauc_recall_at_20_std": 0.3913968688250488,
276
+ "nauc_recall_at_3_diff1": 0.7950235725510737,
277
+ "nauc_recall_at_3_max": 0.7924455586320431,
278
+ "nauc_recall_at_3_std": 0.22336301728653749,
279
+ "nauc_recall_at_5_diff1": 0.7680549517433267,
280
+ "nauc_recall_at_5_max": 0.8022824102251974,
281
+ "nauc_recall_at_5_std": 0.22140683418833096,
282
+ "ndcg_at_1": 0.75222,
283
+ "ndcg_at_10": 0.83786,
284
+ "ndcg_at_100": 0.85106,
285
+ "ndcg_at_1000": 0.8531,
286
+ "ndcg_at_20": 0.84486,
287
+ "ndcg_at_3": 0.8132,
288
+ "ndcg_at_5": 0.82243,
289
+ "precision_at_1": 0.75222,
290
+ "precision_at_10": 0.09244,
291
+ "precision_at_100": 0.00984,
292
+ "precision_at_1000": 0.001,
293
+ "precision_at_20": 0.04761,
294
+ "precision_at_3": 0.28519,
295
+ "precision_at_5": 0.17556,
296
+ "recall_at_1": 0.75222,
297
+ "recall_at_10": 0.92444,
298
+ "recall_at_100": 0.98444,
299
+ "recall_at_1000": 1.0,
300
+ "recall_at_20": 0.95222,
301
+ "recall_at_3": 0.85556,
302
+ "recall_at_5": 0.87778
303
+ },
304
+ {
305
+ "hf_subset": "eng_Latn-rus_Cyrl",
306
+ "languages": [
307
+ "eng-Latn",
308
+ "rus-Cyrl"
309
+ ],
310
+ "main_score": 0.89009,
311
+ "map_at_1": 0.81667,
312
+ "map_at_10": 0.86748,
313
+ "map_at_100": 0.86905,
314
+ "map_at_1000": 0.86908,
315
+ "map_at_20": 0.86848,
316
+ "map_at_3": 0.85722,
317
+ "map_at_5": 0.86394,
318
+ "mrr_at_1": 0.8166666666666667,
319
+ "mrr_at_10": 0.8674779541446214,
320
+ "mrr_at_100": 0.8690515446510788,
321
+ "mrr_at_1000": 0.8690806433040723,
322
+ "mrr_at_20": 0.868483382358383,
323
+ "mrr_at_3": 0.8572222222222224,
324
+ "mrr_at_5": 0.8639444444444451,
325
+ "nauc_map_at_1000_diff1": 0.8643380445978972,
326
+ "nauc_map_at_1000_max": 0.7723368013766394,
327
+ "nauc_map_at_1000_std": 0.08530443292559109,
328
+ "nauc_map_at_100_diff1": 0.864319602268892,
329
+ "nauc_map_at_100_max": 0.7723686087055622,
330
+ "nauc_map_at_100_std": 0.08549027779194206,
331
+ "nauc_map_at_10_diff1": 0.8633639485864444,
332
+ "nauc_map_at_10_max": 0.7732516302658029,
333
+ "nauc_map_at_10_std": 0.0846567284405085,
334
+ "nauc_map_at_1_diff1": 0.8806214842222417,
335
+ "nauc_map_at_1_max": 0.7575038353939165,
336
+ "nauc_map_at_1_std": 0.0795806635983519,
337
+ "nauc_map_at_20_diff1": 0.8643773763822606,
338
+ "nauc_map_at_20_max": 0.7726790806924059,
339
+ "nauc_map_at_20_std": 0.08680922556483701,
340
+ "nauc_map_at_3_diff1": 0.8600380555075058,
341
+ "nauc_map_at_3_max": 0.7724422065123657,
342
+ "nauc_map_at_3_std": 0.0750882890299374,
343
+ "nauc_map_at_5_diff1": 0.861073619114629,
344
+ "nauc_map_at_5_max": 0.7706668240555979,
345
+ "nauc_map_at_5_std": 0.08328286574967704,
346
+ "nauc_mrr_at_1000_diff1": 0.8643380445978972,
347
+ "nauc_mrr_at_1000_max": 0.7723368013766394,
348
+ "nauc_mrr_at_1000_std": 0.08530443292559109,
349
+ "nauc_mrr_at_100_diff1": 0.864319602268892,
350
+ "nauc_mrr_at_100_max": 0.7723686087055622,
351
+ "nauc_mrr_at_100_std": 0.08549027779194206,
352
+ "nauc_mrr_at_10_diff1": 0.8633639485864444,
353
+ "nauc_mrr_at_10_max": 0.7732516302658029,
354
+ "nauc_mrr_at_10_std": 0.0846567284405085,
355
+ "nauc_mrr_at_1_diff1": 0.8806214842222417,
356
+ "nauc_mrr_at_1_max": 0.7575038353939165,
357
+ "nauc_mrr_at_1_std": 0.0795806635983519,
358
+ "nauc_mrr_at_20_diff1": 0.8643773763822606,
359
+ "nauc_mrr_at_20_max": 0.7726790806924059,
360
+ "nauc_mrr_at_20_std": 0.08680922556483701,
361
+ "nauc_mrr_at_3_diff1": 0.8600380555075058,
362
+ "nauc_mrr_at_3_max": 0.7724422065123657,
363
+ "nauc_mrr_at_3_std": 0.0750882890299374,
364
+ "nauc_mrr_at_5_diff1": 0.861073619114629,
365
+ "nauc_mrr_at_5_max": 0.7706668240555979,
366
+ "nauc_mrr_at_5_std": 0.08328286574967704,
367
+ "nauc_ndcg_at_1000_diff1": 0.8626706031051983,
368
+ "nauc_ndcg_at_1000_max": 0.7750245667913902,
369
+ "nauc_ndcg_at_1000_std": 0.0917739218765757,
370
+ "nauc_ndcg_at_100_diff1": 0.8623523201483277,
371
+ "nauc_ndcg_at_100_max": 0.7760594951634858,
372
+ "nauc_ndcg_at_100_std": 0.09742475691997585,
373
+ "nauc_ndcg_at_10_diff1": 0.8588102720437044,
374
+ "nauc_ndcg_at_10_max": 0.7806456227027242,
375
+ "nauc_ndcg_at_10_std": 0.09769448104289541,
376
+ "nauc_ndcg_at_1_diff1": 0.8806214842222417,
377
+ "nauc_ndcg_at_1_max": 0.7575038353939165,
378
+ "nauc_ndcg_at_1_std": 0.0795806635983519,
379
+ "nauc_ndcg_at_20_diff1": 0.8628253037674907,
380
+ "nauc_ndcg_at_20_max": 0.778271597651214,
381
+ "nauc_ndcg_at_20_std": 0.10688630690391747,
382
+ "nauc_ndcg_at_3_diff1": 0.8518763686982234,
383
+ "nauc_ndcg_at_3_max": 0.7771547494771308,
384
+ "nauc_ndcg_at_3_std": 0.07485877485103896,
385
+ "nauc_ndcg_at_5_diff1": 0.8531372346164149,
386
+ "nauc_ndcg_at_5_max": 0.7736643340178706,
387
+ "nauc_ndcg_at_5_std": 0.09169042388993195,
388
+ "nauc_precision_at_1000_diff1": NaN,
389
+ "nauc_precision_at_1000_max": NaN,
390
+ "nauc_precision_at_1000_std": NaN,
391
+ "nauc_precision_at_100_diff1": 0.8291316526610534,
392
+ "nauc_precision_at_100_max": 0.9183006535947483,
393
+ "nauc_precision_at_100_std": 0.8552754435107516,
394
+ "nauc_precision_at_10_diff1": 0.8241518829754126,
395
+ "nauc_precision_at_10_max": 0.8501659923228537,
396
+ "nauc_precision_at_10_std": 0.23557941695196524,
397
+ "nauc_precision_at_1_diff1": 0.8806214842222417,
398
+ "nauc_precision_at_1_max": 0.7575038353939165,
399
+ "nauc_precision_at_1_std": 0.0795806635983519,
400
+ "nauc_precision_at_20_diff1": 0.8648070339246752,
401
+ "nauc_precision_at_20_max": 0.8484866168689655,
402
+ "nauc_precision_at_20_std": 0.4372276688453088,
403
+ "nauc_precision_at_3_diff1": 0.8180644806942381,
404
+ "nauc_precision_at_3_max": 0.7961223705168371,
405
+ "nauc_precision_at_3_std": 0.07461965178228215,
406
+ "nauc_precision_at_5_diff1": 0.8084714111633349,
407
+ "nauc_precision_at_5_max": 0.7884046273876771,
408
+ "nauc_precision_at_5_std": 0.14550000791276738,
409
+ "nauc_recall_at_1000_diff1": NaN,
410
+ "nauc_recall_at_1000_max": NaN,
411
+ "nauc_recall_at_1000_std": NaN,
412
+ "nauc_recall_at_100_diff1": 0.8291316526610856,
413
+ "nauc_recall_at_100_max": 0.9183006535947761,
414
+ "nauc_recall_at_100_std": 0.8552754435107421,
415
+ "nauc_recall_at_10_diff1": 0.8241518829754128,
416
+ "nauc_recall_at_10_max": 0.8501659923228546,
417
+ "nauc_recall_at_10_std": 0.2355794169519683,
418
+ "nauc_recall_at_1_diff1": 0.8806214842222417,
419
+ "nauc_recall_at_1_max": 0.7575038353939165,
420
+ "nauc_recall_at_1_std": 0.0795806635983519,
421
+ "nauc_recall_at_20_diff1": 0.8648070339246773,
422
+ "nauc_recall_at_20_max": 0.8484866168689676,
423
+ "nauc_recall_at_20_std": 0.43722766884531594,
424
+ "nauc_recall_at_3_diff1": 0.8180644806942396,
425
+ "nauc_recall_at_3_max": 0.7961223705168358,
426
+ "nauc_recall_at_3_std": 0.0746196517822821,
427
+ "nauc_recall_at_5_diff1": 0.8084714111633345,
428
+ "nauc_recall_at_5_max": 0.7884046273876769,
429
+ "nauc_recall_at_5_std": 0.14550000791276985,
430
+ "ndcg_at_1": 0.81667,
431
+ "ndcg_at_10": 0.89009,
432
+ "ndcg_at_100": 0.89753,
433
+ "ndcg_at_1000": 0.89825,
434
+ "ndcg_at_20": 0.89356,
435
+ "ndcg_at_3": 0.86969,
436
+ "ndcg_at_5": 0.8817,
437
+ "precision_at_1": 0.81667,
438
+ "precision_at_10": 0.096,
439
+ "precision_at_100": 0.00994,
440
+ "precision_at_1000": 0.001,
441
+ "precision_at_20": 0.04867,
442
+ "precision_at_3": 0.30185,
443
+ "precision_at_5": 0.18689,
444
+ "recall_at_1": 0.81667,
445
+ "recall_at_10": 0.96,
446
+ "recall_at_100": 0.99444,
447
+ "recall_at_1000": 1.0,
448
+ "recall_at_20": 0.97333,
449
+ "recall_at_3": 0.90556,
450
+ "recall_at_5": 0.93444
451
+ }
452
+ ]
453
+ },
454
+ "task_name": "BelebeleRetrieval"
455
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/BibleNLPBitextMining.json ADDED
@@ -0,0 +1,35 @@
+ {
+ "dataset_revision": "264a18480c529d9e922483839b4b9758e690b762",
+ "evaluation_time": 1.0280451774597168,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.12.85",
+ "scores": {
+ "train": [
+ {
+ "accuracy": 0.99609375,
+ "f1": 0.9947916666666666,
+ "hf_subset": "eng_Latn-rus_Cyrl",
+ "languages": [
+ "eng-Latn",
+ "rus-Cyrl"
+ ],
+ "main_score": 0.9947916666666666,
+ "precision": 0.994140625,
+ "recall": 0.99609375
+ },
+ {
+ "accuracy": 0.98828125,
+ "f1": 0.984375,
+ "hf_subset": "rus_Cyrl-eng_Latn",
+ "languages": [
+ "rus-Cyrl",
+ "eng-Latn"
+ ],
+ "main_score": 0.984375,
+ "precision": 0.982421875,
+ "recall": 0.98828125
+ }
+ ]
+ },
+ "task_name": "BibleNLPBitextMining"
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/CEDRClassification.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "dataset_revision": "c0ba03d058e3e1b2f3fd20518875a4563dd12db4",
+ "evaluation_time": 3.951763391494751,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.12.49",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.45106269925611053,
+ "f1": 0.3889139272532264,
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "lrap": 0.6916312433581407,
+ "main_score": 0.45106269925611053,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.487778958554729,
+ "f1": 0.4263648735030712,
+ "lrap": 0.6915515409139324
+ },
+ {
+ "accuracy": 0.4957492029755579,
+ "f1": 0.41283709069024777,
+ "lrap": 0.7207226354941665
+ },
+ {
+ "accuracy": 0.44580233793836344,
+ "f1": 0.36010479095530523,
+ "lrap": 0.6596705632306177
+ },
+ {
+ "accuracy": 0.43198724760892665,
+ "f1": 0.4070996839442266,
+ "lrap": 0.6868225292242405
+ },
+ {
+ "accuracy": 0.4718384697130712,
+ "f1": 0.37381147617227817,
+ "lrap": 0.7048352816153138
+ },
+ {
+ "accuracy": 0.4261424017003188,
+ "f1": 0.36860943401981777,
+ "lrap": 0.6766206163655797
+ },
+ {
+ "accuracy": 0.410201912858661,
+ "f1": 0.372294708530453,
+ "lrap": 0.6853347502656861
+ },
+ {
+ "accuracy": 0.4585547290116897,
+ "f1": 0.3678908113410025,
+ "lrap": 0.7057385759830069
+ },
+ {
+ "accuracy": 0.4394261424017003,
+ "f1": 0.3662705067079089,
+ "lrap": 0.6802869287991615
+ },
+ {
+ "accuracy": 0.44314558979808716,
+ "f1": 0.43385589666795266,
+ "lrap": 0.704729011689702
+ }
+ ]
+ }
+ ]
+ },
+ "task_name": "CEDRClassification"
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/CyrillicTurkicLangClassification.json ADDED
@@ -0,0 +1,81 @@
+ {
+ "dataset_revision": "e42d330f33d65b7b72dfd408883daf1661f06f18",
+ "evaluation_time": 11.126097679138184,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.12.49",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.52666015625,
+ "f1": 0.5029982073019428,
+ "f1_weighted": 0.503123846352733,
+ "hf_subset": "default",
+ "languages": [
+ "bak-Cyrl",
+ "chv-Cyrl",
+ "tat-Cyrl",
+ "kir-Cyrl",
+ "rus-Cyrl",
+ "kaz-Cyrl",
+ "tyv-Cyrl",
+ "krc-Cyrl",
+ "sah-Cyrl"
+ ],
+ "main_score": 0.52666015625,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.5791015625,
+ "f1": 0.5651898117875704,
+ "f1_weighted": 0.5653058533825627
+ },
+ {
+ "accuracy": 0.50146484375,
+ "f1": 0.46259946973780736,
+ "f1_weighted": 0.46269049915801064
+ },
+ {
+ "accuracy": 0.50390625,
+ "f1": 0.4921234338264859,
+ "f1_weighted": 0.49216006202963897
+ },
+ {
+ "accuracy": 0.548828125,
+ "f1": 0.5231900964679697,
+ "f1_weighted": 0.5233687955964221
+ },
+ {
+ "accuracy": 0.5498046875,
+ "f1": 0.520667898217283,
+ "f1_weighted": 0.5208508177476132
+ },
+ {
+ "accuracy": 0.5009765625,
+ "f1": 0.4853538287312669,
+ "f1_weighted": 0.48547390696658155
+ },
+ {
+ "accuracy": 0.4658203125,
+ "f1": 0.4411270657097543,
+ "f1_weighted": 0.4411661542302675
+ },
+ {
+ "accuracy": 0.53955078125,
+ "f1": 0.5162879219189533,
+ "f1_weighted": 0.5164184951307906
+ },
+ {
+ "accuracy": 0.51513671875,
+ "f1": 0.48698476139570485,
+ "f1_weighted": 0.48712428509923184
+ },
+ {
+ "accuracy": 0.56201171875,
+ "f1": 0.536457785226632,
+ "f1_weighted": 0.5366795941862118
+ }
+ ]
+ }
+ ]
+ },
+ "task_name": "CyrillicTurkicLangClassification"
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/FloresBitextMining.json ADDED
The diff for this file is too large to render. See raw diff
 
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/GeoreviewClassification.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "dataset_revision": "3765c0d1de6b7d264bc459433c45e5a75513839c",
+ "evaluation_time": 13.446590423583984,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.12.49",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.460400390625,
+ "f1": 0.43841835413732166,
+ "f1_weighted": 0.4384173007237379,
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.460400390625,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.44677734375,
+ "f1": 0.4250682289757721,
+ "f1_weighted": 0.425105622728562
+ },
+ {
+ "accuracy": 0.45751953125,
+ "f1": 0.44056501124824105,
+ "f1_weighted": 0.440517829589428
+ },
+ {
+ "accuracy": 0.45751953125,
+ "f1": 0.4152582768868072,
+ "f1_weighted": 0.4152530263105265
+ },
+ {
+ "accuracy": 0.4521484375,
+ "f1": 0.43638452049300136,
+ "f1_weighted": 0.43642179665464265
+ },
+ {
+ "accuracy": 0.46923828125,
+ "f1": 0.4385598171048167,
+ "f1_weighted": 0.43855689390570685
+ },
+ {
+ "accuracy": 0.4462890625,
+ "f1": 0.43287535418355977,
+ "f1_weighted": 0.4328823251474887
+ },
+ {
+ "accuracy": 0.47314453125,
+ "f1": 0.45844126955094494,
+ "f1_weighted": 0.45842557338078
+ },
+ {
+ "accuracy": 0.48095703125,
+ "f1": 0.47009966168397843,
+ "f1_weighted": 0.4700736643021474
+ },
+ {
+ "accuracy": 0.43505859375,
+ "f1": 0.4145285528954628,
+ "f1_weighted": 0.41456335972406777
+ },
+ {
+ "accuracy": 0.4853515625,
+ "f1": 0.45240284835063244,
+ "f1_weighted": 0.4523729154940298
+ }
+ ]
+ }
+ ]
+ },
+ "task_name": "GeoreviewClassification"
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/GeoreviewClusteringP2P.json ADDED
@@ -0,0 +1,34 @@
+ {
+ "dataset_revision": "97a313c8fc85b47f13f33e7e9a95c1ad888c7fec",
+ "evaluation_time": 12.065478324890137,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.12.49",
+ "scores": {
+ "test": [
+ {
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.6454815667450844,
+ "v_measure": 0.6454815667450844,
+ "v_measure_std": 0.007434000650384461,
+ "v_measures": {
+ "Level 0": [
+ 0.6355514377460344,
+ 0.6501704630477265,
+ 0.6543084730446548,
+ 0.6406550252588781,
+ 0.6343196904524325,
+ 0.6514531085500614,
+ 0.6381832469265022,
+ 0.6445038210827504,
+ 0.6548625822580176,
+ 0.6508078190837862
+ ]
+ }
+ }
+ ]
+ },
+ "task_name": "GeoreviewClusteringP2P"
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/HeadlineClassification.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "dataset_revision": "2fe05ee6b5832cda29f2ef7aaad7b7fe6a3609eb",
+ "evaluation_time": 5.707996845245361,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.12.49",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.6998046875,
+ "f1": 0.6986587397882883,
+ "f1_weighted": 0.6986742849675426,
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.6998046875,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.67529296875,
+ "f1": 0.6767125274701676,
+ "f1_weighted": 0.6767275428868276
+ },
+ {
+ "accuracy": 0.69921875,
+ "f1": 0.6971052499833882,
+ "f1_weighted": 0.6971267560834126
+ },
+ {
+ "accuracy": 0.69091796875,
+ "f1": 0.6911340511128938,
+ "f1_weighted": 0.6911402301474174
+ },
+ {
+ "accuracy": 0.7138671875,
+ "f1": 0.7139084743087701,
+ "f1_weighted": 0.7139128327837027
+ },
+ {
+ "accuracy": 0.7236328125,
+ "f1": 0.7243734564218629,
+ "f1_weighted": 0.7244005805747105
+ },
+ {
+ "accuracy": 0.73046875,
+ "f1": 0.730184123508848,
+ "f1_weighted": 0.7301880768599929
+ },
+ {
+ "accuracy": 0.68212890625,
+ "f1": 0.6801170060486866,
+ "f1_weighted": 0.6801080941100408
+ },
+ {
+ "accuracy": 0.68798828125,
+ "f1": 0.6841436961258743,
+ "f1_weighted": 0.6841813757303482
+ },
+ {
+ "accuracy": 0.67236328125,
+ "f1": 0.6678870032884024,
+ "f1_weighted": 0.6679326568472147
+ },
+ {
+ "accuracy": 0.72216796875,
+ "f1": 0.7210218096139883,
+ "f1_weighted": 0.721024703651759
+ }
+ ]
+ }
+ ]
+ },
+ "task_name": "HeadlineClassification"
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/InappropriatenessClassification.json ADDED
@@ -0,0 +1,95 @@
1
+ {
2
+ "dataset_revision": "601651fdc45ef243751676e62dd7a19f491c0285",
3
+ "evaluation_time": 6.36400580406189,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.49",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.6138671875,
10
+ "ap": 0.5711020118497604,
11
+ "ap_weighted": 0.5711020118497604,
12
+ "f1": 0.6099462942803939,
13
+ "f1_weighted": 0.6099462942803939,
14
+ "hf_subset": "default",
15
+ "languages": [
16
+ "rus-Cyrl"
17
+ ],
18
+ "main_score": 0.6138671875,
19
+ "scores_per_experiment": [
20
+ {
21
+ "accuracy": 0.64697265625,
22
+ "ap": 0.5982007245111731,
23
+ "ap_weighted": 0.5982007245111731,
24
+ "f1": 0.6455664327160904,
25
+ "f1_weighted": 0.6455664327160904
26
+ },
27
+ {
28
+ "accuracy": 0.63427734375,
29
+ "ap": 0.5824353866766777,
30
+ "ap_weighted": 0.5824353866766777,
31
+ "f1": 0.6313337651397624,
32
+ "f1_weighted": 0.6313337651397624
33
+ },
34
+ {
35
+ "accuracy": 0.63037109375,
36
+ "ap": 0.5842485709884995,
37
+ "ap_weighted": 0.5842485709884995,
38
+ "f1": 0.6292820893819989,
39
+ "f1_weighted": 0.6292820893819989
40
+ },
41
+ {
42
+ "accuracy": 0.5732421875,
43
+ "ap": 0.5415788230482852,
44
+ "ap_weighted": 0.5415788230482852,
45
+ "f1": 0.5725230509394237,
46
+ "f1_weighted": 0.5725230509394237
47
+ },
48
+ {
49
+ "accuracy": 0.62158203125,
50
+ "ap": 0.5749774850632615,
51
+ "ap_weighted": 0.5749774850632615,
52
+ "f1": 0.6214151374314095,
53
+ "f1_weighted": 0.6214151374314095
54
+ },
55
+ {
56
+ "accuracy": 0.5693359375,
57
+ "ap": 0.5392009812384899,
58
+ "ap_weighted": 0.5392009812384899,
59
+ "f1": 0.5689408799988546,
60
+ "f1_weighted": 0.5689408799988546
61
+ },
62
+ {
63
+ "accuracy": 0.671875,
64
+ "ap": 0.6147470238095238,
65
+ "ap_weighted": 0.6147470238095238,
66
+ "f1": 0.6718221072541484,
67
+ "f1_weighted": 0.6718221072541484
68
+ },
69
+ {
70
+ "accuracy": 0.59912109375,
71
+ "ap": 0.5562543532726214,
72
+ "ap_weighted": 0.5562543532726214,
73
+ "f1": 0.575922799854623,
74
+ "f1_weighted": 0.575922799854623
75
+ },
76
+ {
77
+ "accuracy": 0.5927734375,
78
+ "ap": 0.5571611399755501,
79
+ "ap_weighted": 0.5571611399755501,
80
+ "f1": 0.5886111986219216,
81
+ "f1_weighted": 0.5886111986219216
82
+ },
83
+ {
84
+ "accuracy": 0.59912109375,
85
+ "ap": 0.5622156299135219,
86
+ "ap_weighted": 0.5622156299135219,
87
+ "f1": 0.5940454814657077,
88
+ "f1_weighted": 0.5940454814657077
89
+ }
90
+ ]
91
+ }
92
+ ]
93
+ },
94
+ "task_name": "InappropriatenessClassification"
95
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/KinopoiskClassification.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "dataset_revision": "5911f26666ac11af46cb9c6849d0dc80a378af24",
+ "evaluation_time": 19.137253522872925,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.12.49",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.5358666666666667,
+ "f1": 0.520574171502198,
+ "f1_weighted": 0.520574171502198,
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.5358666666666667,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.464,
+ "f1": 0.4641144227845906,
+ "f1_weighted": 0.4641144227845906
+ },
+ {
+ "accuracy": 0.532,
+ "f1": 0.5014916748618763,
+ "f1_weighted": 0.5014916748618764
+ },
+ {
+ "accuracy": 0.5513333333333333,
+ "f1": 0.5361739652519795,
+ "f1_weighted": 0.5361739652519794
+ },
+ {
+ "accuracy": 0.5293333333333333,
+ "f1": 0.5209519790449868,
+ "f1_weighted": 0.5209519790449868
+ },
+ {
+ "accuracy": 0.5206666666666667,
+ "f1": 0.5122095429926548,
+ "f1_weighted": 0.5122095429926548
+ },
+ {
+ "accuracy": 0.5733333333333334,
+ "f1": 0.5607525040289517,
+ "f1_weighted": 0.5607525040289516
+ },
+ {
+ "accuracy": 0.568,
+ "f1": 0.5418267078967264,
+ "f1_weighted": 0.5418267078967264
+ },
+ {
+ "accuracy": 0.5246666666666666,
+ "f1": 0.5101316179123965,
+ "f1_weighted": 0.5101316179123965
+ },
+ {
+ "accuracy": 0.5633333333333334,
+ "f1": 0.5329814323386534,
+ "f1_weighted": 0.5329814323386534
+ },
+ {
+ "accuracy": 0.532,
+ "f1": 0.5251078679091645,
+ "f1_weighted": 0.5251078679091645
+ }
+ ]
+ }
+ ]
+ },
+ "task_name": "KinopoiskClassification"
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/LanguageClassification.json ADDED
@@ -0,0 +1,92 @@
1
+ {
2
+ "dataset_revision": "aa56583bf2bc52b0565770607d6fc3faebecf9e2",
3
+ "evaluation_time": 20.951558113098145,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.85",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.58466796875,
10
+ "f1": 0.5634228793486037,
11
+ "f1_weighted": 0.5638704799655637,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "ara-Arab",
15
+ "bul-Cyrl",
16
+ "deu-Latn",
17
+ "ell-Grek",
18
+ "eng-Latn",
19
+ "spa-Latn",
20
+ "fra-Latn",
21
+ "hin-Deva",
22
+ "ita-Latn",
23
+ "jpn-Jpan",
24
+ "nld-Latn",
25
+ "pol-Latn",
26
+ "por-Latn",
27
+ "rus-Cyrl",
28
+ "swa-Latn",
29
+ "tha-Thai",
30
+ "tur-Latn",
31
+ "urd-Arab",
32
+ "vie-Latn",
33
+ "cmn-Hans"
34
+ ],
35
+ "main_score": 0.58466796875,
36
+ "scores_per_experiment": [
37
+ {
38
+ "accuracy": 0.5849609375,
39
+ "f1": 0.566736622829094,
40
+ "f1_weighted": 0.5672100855634257
41
+ },
42
+ {
43
+ "accuracy": 0.548828125,
44
+ "f1": 0.5371148497114137,
45
+ "f1_weighted": 0.5373173771491079
46
+ },
47
+ {
48
+ "accuracy": 0.5849609375,
49
+ "f1": 0.5667503963059322,
50
+ "f1_weighted": 0.567194432702426
51
+ },
52
+ {
53
+ "accuracy": 0.5810546875,
54
+ "f1": 0.5527904820575884,
55
+ "f1_weighted": 0.55315661854952
56
+ },
57
+ {
58
+ "accuracy": 0.572265625,
59
+ "f1": 0.5606993393647322,
60
+ "f1_weighted": 0.5611613522192491
61
+ },
62
+ {
63
+ "accuracy": 0.619140625,
64
+ "f1": 0.5888746562912297,
65
+ "f1_weighted": 0.5893934625139658
66
+ },
67
+ {
68
+ "accuracy": 0.60107421875,
69
+ "f1": 0.5789618960135535,
70
+ "f1_weighted": 0.5794117849760226
71
+ },
72
+ {
73
+ "accuracy": 0.58642578125,
74
+ "f1": 0.5525352855847465,
75
+ "f1_weighted": 0.5530775668719148
76
+ },
77
+ {
78
+ "accuracy": 0.56884765625,
79
+ "f1": 0.5502276842056322,
80
+ "f1_weighted": 0.5507430840582512
81
+ },
82
+ {
83
+ "accuracy": 0.59912109375,
84
+ "f1": 0.5795375811221148,
85
+ "f1_weighted": 0.5800390350517537
86
+ }
87
+ ]
88
+ }
89
+ ]
90
+ },
91
+ "task_name": "LanguageClassification"
92
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/MLSUMClusteringP2P.json ADDED
@@ -0,0 +1,55 @@
+ {
+ "dataset_revision": "b5d54f8f3b61ae17845046286940f03c6bc79bc7",
+ "evaluation_time": 19.45046830177307,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.12.85",
+ "scores": {
+ "test": [
+ {
+ "hf_subset": "ru",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.45702686342079923,
+ "v_measure": 0.45702686342079923,
+ "v_measure_std": 0.06913826231783649,
+ "v_measures": [
+ 0.5273742802274182,
+ 0.4268113635433883,
+ 0.4396134537023812,
+ 0.4079782830352051,
+ 0.5243484295706045,
+ 0.5233615634250766,
+ 0.5195100972637988,
+ 0.4123702962344191,
+ 0.30343450294277835,
+ 0.48546636426292145
+ ]
+ }
+ ],
+ "validation": [
+ {
+ "hf_subset": "ru",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.4352664466196433,
+ "v_measure": 0.4352664466196433,
+ "v_measure_std": 0.059164239709892395,
+ "v_measures": [
+ 0.4428478742284891,
+ 0.4646733278629463,
+ 0.29858505201563706,
+ 0.5099936034881329,
+ 0.4263698362867996,
+ 0.4753306504920366,
+ 0.43448114630264467,
+ 0.5043182186444757,
+ 0.38085575769254965,
+ 0.4152089991827221
+ ]
+ }
+ ]
+ },
+ "task_name": "MLSUMClusteringP2P"
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/MLSUMClusteringP2P.v2.json ADDED
@@ -0,0 +1,34 @@
+ {
+ "dataset_revision": "b5d54f8f3b61ae17845046286940f03c6bc79bc7",
+ "evaluation_time": 18.050400733947754,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.12.87",
+ "scores": {
+ "test": [
+ {
+ "hf_subset": "ru",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.4414150154038496,
+ "v_measure": 0.4414150154038496,
+ "v_measure_std": 0.03535603452431544,
+ "v_measures": {
+ "Level 0": [
+ 0.4679769491107662,
+ 0.5054280148215679,
+ 0.43303032174359546,
+ 0.4087297258725467,
+ 0.47734750106840185,
+ 0.46117715192624886,
+ 0.4092379320337164,
+ 0.4289631740412,
+ 0.38069813045654066,
+ 0.4415612529639125
+ ]
+ }
+ }
+ ]
+ },
+ "task_name": "MLSUMClusteringP2P.v2"
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/MLSUMClusteringS2S.json ADDED
@@ -0,0 +1,55 @@
+ {
+ "dataset_revision": "b5d54f8f3b61ae17845046286940f03c6bc79bc7",
+ "evaluation_time": 19.31701397895813,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.12.85",
+ "scores": {
+ "test": [
+ {
+ "hf_subset": "ru",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.429282847070706,
+ "v_measure": 0.429282847070706,
+ "v_measure_std": 0.06817015776485294,
+ "v_measures": [
+ 0.4982554608466768,
+ 0.38180877318667866,
+ 0.48285656988878645,
+ 0.4032204697574008,
+ 0.4274267976417764,
+ 0.5078012317958961,
+ 0.3955852569045282,
+ 0.5262121464131293,
+ 0.30866259987082495,
+ 0.360999164401362
+ ]
+ }
+ ],
+ "validation": [
+ {
+ "hf_subset": "ru",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.42112487175502356,
+ "v_measure": 0.42112487175502356,
+ "v_measure_std": 0.05314024499836145,
+ "v_measures": [
+ 0.37738728809304645,
+ 0.46089951040797944,
+ 0.3903560084888077,
+ 0.35180390006100504,
+ 0.4127684542300222,
+ 0.5066764563113426,
+ 0.5183204286666835,
+ 0.4133966711743972,
+ 0.3980705616027965,
+ 0.3815694385141554
+ ]
+ }
+ ]
+ },
+ "task_name": "MLSUMClusteringS2S"
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/MLSUMClusteringS2S.v2.json ADDED
@@ -0,0 +1,59 @@
+ {
+ "dataset_revision": "b5d54f8f3b61ae17845046286940f03c6bc79bc7",
+ "evaluation_time": 25.03780460357666,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.12.85",
+ "scores": {
+ "test": [
+ {
+ "hf_subset": "ru",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.4360075841550744,
+ "v_measure": 0.4360075841550744,
+ "v_measure_std": 0.019100439210879103,
+ "v_measures": {
+ "Level 0": [
+ 0.4512263510231582,
+ 0.4555233751961542,
+ 0.4247399383311838,
+ 0.44372241202026685,
+ 0.3897528482452662,
+ 0.4378737519622319,
+ 0.4403542391073648,
+ 0.4374650591960303,
+ 0.45784607213491185,
+ 0.42157179433417596
+ ]
+ }
+ }
+ ],
+ "validation": [
+ {
+ "hf_subset": "ru",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.42831090521923487,
+ "v_measure": 0.42831090521923487,
+ "v_measure_std": 0.026990140568743635,
+ "v_measures": {
+ "Level 0": [
+ 0.44175394693751807,
+ 0.43515317448853863,
+ 0.4657090299531174,
+ 0.42170505135158143,
+ 0.3708615917326624,
+ 0.44185692334303195,
+ 0.42944134315162247,
+ 0.45354364917624457,
+ 0.4331132099885593,
+ 0.3899711320694725
+ ]
+ }
+ }
+ ]
+ },
+ "task_name": "MLSUMClusteringS2S.v2"
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/MassiveIntentClassification.json ADDED
@@ -0,0 +1,137 @@
1
+ {
2
+ "dataset_revision": "4672e20407010da34463acc759c162ca9734bca6",
3
+ "evaluation_time": 17.33305311203003,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.85",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.6607935440484197,
10
+ "f1": 0.6343081703341417,
11
+ "f1_weighted": 0.6497766872014012,
12
+ "hf_subset": "ru",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.6607935440484197,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.6731674512441157,
20
+ "f1": 0.6531105757875425,
21
+ "f1_weighted": 0.6656177498284838
22
+ },
23
+ {
24
+ "accuracy": 0.6933422999327505,
25
+ "f1": 0.6610965130974625,
26
+ "f1_weighted": 0.6839461739346869
27
+ },
28
+ {
29
+ "accuracy": 0.6519838601210491,
30
+ "f1": 0.6262324637074906,
31
+ "f1_weighted": 0.6441711580522536
32
+ },
33
+ {
34
+ "accuracy": 0.6893073301950235,
35
+ "f1": 0.6556615322302324,
36
+ "f1_weighted": 0.6843116926549452
37
+ },
38
+ {
39
+ "accuracy": 0.65635507733692,
40
+ "f1": 0.6102243550232248,
41
+ "f1_weighted": 0.6428766126558386
42
+ },
43
+ {
44
+ "accuracy": 0.624747814391392,
45
+ "f1": 0.6192461211166198,
46
+ "f1_weighted": 0.6143729465420229
47
+ },
48
+ {
49
+ "accuracy": 0.6566913248150639,
50
+ "f1": 0.6282394930195696,
51
+ "f1_weighted": 0.6416958047945244
52
+ },
53
+ {
54
+ "accuracy": 0.6513113651647613,
55
+ "f1": 0.6265743767461933,
56
+ "f1_weighted": 0.6402046172162404
57
+ },
58
+ {
59
+ "accuracy": 0.6297915265635508,
60
+ "f1": 0.6136278404008212,
61
+ "f1_weighted": 0.6091639102677674
62
+ },
63
+ {
64
+ "accuracy": 0.6812373907195696,
65
+ "f1": 0.6490684322122594,
66
+ "f1_weighted": 0.6714062060672489
67
+ }
68
+ ]
69
+ }
70
+ ],
71
+ "validation": [
72
+ {
73
+ "accuracy": 0.653910477127398,
74
+ "f1": 0.6116235219832036,
75
+ "f1_weighted": 0.643272418661857,
76
+ "hf_subset": "ru",
77
+ "languages": [
78
+ "rus-Cyrl"
79
+ ],
80
+ "main_score": 0.653910477127398,
81
+ "scores_per_experiment": [
82
+ {
83
+ "accuracy": 0.661583866207575,
84
+ "f1": 0.6233512175942642,
85
+ "f1_weighted": 0.6610832046895213
86
+ },
87
+ {
88
+ "accuracy": 0.6935563207083129,
89
+ "f1": 0.6428552473432402,
90
+ "f1_weighted": 0.6838733414281566
91
+ },
92
+ {
93
+ "accuracy": 0.6625676340383669,
94
+ "f1": 0.6204448648785962,
95
+ "f1_weighted": 0.6552077546861638
96
+ },
97
+ {
98
+ "accuracy": 0.6650270536153468,
99
+ "f1": 0.6139539502401764,
100
+ "f1_weighted": 0.6589632349141006
101
+ },
102
+ {
103
+ "accuracy": 0.6645351696999509,
104
+ "f1": 0.6095626653160383,
105
+ "f1_weighted": 0.652814979543798
106
+ },
107
+ {
108
+ "accuracy": 0.6119035907525824,
109
+ "f1": 0.5925954577233485,
110
+ "f1_weighted": 0.5995760400278768
111
+ },
112
+ {
113
+ "accuracy": 0.6384653221839646,
114
+ "f1": 0.5921245565487646,
115
+ "f1_weighted": 0.6192342432272874
116
+ },
117
+ {
118
+ "accuracy": 0.6296114117068372,
119
+ "f1": 0.5938226933432713,
120
+ "f1_weighted": 0.6180514598678591
121
+ },
122
+ {
123
+ "accuracy": 0.6335464830300049,
124
+ "f1": 0.6017283644065152,
125
+ "f1_weighted": 0.6148525168247683
126
+ },
127
+ {
128
+ "accuracy": 0.6783079193310378,
129
+ "f1": 0.6257962024378204,
130
+ "f1_weighted": 0.6690674114090381
131
+ }
132
+ ]
133
+ }
134
+ ]
135
+ },
136
+ "task_name": "MassiveIntentClassification"
137
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/MassiveScenarioClassification.json ADDED
@@ -0,0 +1,137 @@
1
+ {
2
+ "dataset_revision": "fad2c6e8459f9e1c45d9315f4953d921437d70f8",
3
+ "evaluation_time": 11.238596439361572,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.85",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.7112642905178211,
10
+ "f1": 0.7035797017070132,
11
+ "f1_weighted": 0.7052516832938883,
12
+ "hf_subset": "ru",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.7112642905178211,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.7215870880968392,
20
+ "f1": 0.7215058667446401,
21
+ "f1_weighted": 0.7154585354728603
22
+ },
23
+ {
24
+ "accuracy": 0.7199058507061197,
25
+ "f1": 0.7149693379160601,
26
+ "f1_weighted": 0.7112620631338245
27
+ },
28
+ {
29
+ "accuracy": 0.7259583053127101,
30
+ "f1": 0.7125060617936306,
31
+ "f1_weighted": 0.7223394833491779
32
+ },
33
+ {
34
+ "accuracy": 0.7286482851378615,
35
+ "f1": 0.7206542370334886,
36
+ "f1_weighted": 0.7270715576919436
37
+ },
38
+ {
39
+ "accuracy": 0.703093476798924,
40
+ "f1": 0.682373094300659,
41
+ "f1_weighted": 0.6918924358195725
42
+ },
43
+ {
44
+ "accuracy": 0.6809011432414257,
45
+ "f1": 0.6717742770650729,
46
+ "f1_weighted": 0.667860594856785
47
+ },
48
+ {
49
+ "accuracy": 0.6936785474108944,
50
+ "f1": 0.6844879928343284,
51
+ "f1_weighted": 0.6873843576387859
52
+ },
53
+ {
54
+ "accuracy": 0.7000672494956288,
55
+ "f1": 0.69946210328831,
56
+ "f1_weighted": 0.6981859021802034
57
+ },
58
+ {
59
+ "accuracy": 0.7215870880968392,
60
+ "f1": 0.7181786666009672,
61
+ "f1_weighted": 0.7215390354758083
62
+ },
63
+ {
64
+ "accuracy": 0.7172158708809684,
65
+ "f1": 0.7098853794929766,
66
+ "f1_weighted": 0.7095228673199212
67
+ }
68
+ ]
69
+ }
70
+ ],
71
+ "validation": [
72
+ {
73
+ "accuracy": 0.7071323167732415,
74
+ "f1": 0.6971408164740018,
75
+ "f1_weighted": 0.7031860412120622,
76
+ "hf_subset": "ru",
77
+ "languages": [
78
+ "rus-Cyrl"
79
+ ],
80
+ "main_score": 0.7071323167732415,
81
+ "scores_per_experiment": [
82
+ {
83
+ "accuracy": 0.7270044269552386,
84
+ "f1": 0.7170541187860603,
85
+ "f1_weighted": 0.7255240823113983
86
+ },
87
+ {
88
+ "accuracy": 0.7161829808165273,
89
+ "f1": 0.7109051789058611,
90
+ "f1_weighted": 0.7119058635135944
91
+ },
92
+ {
93
+ "accuracy": 0.7274963108706345,
94
+ "f1": 0.7154362795713805,
95
+ "f1_weighted": 0.7276221630620682
96
+ },
97
+ {
98
+ "accuracy": 0.706837186424004,
99
+ "f1": 0.7004567650603355,
100
+ "f1_weighted": 0.7019055641408652
101
+ },
102
+ {
103
+ "accuracy": 0.7142154451549434,
104
+ "f1": 0.6928719693765745,
105
+ "f1_weighted": 0.7069056441161415
106
+ },
107
+ {
108
+ "accuracy": 0.6660108214461387,
109
+ "f1": 0.6568322135552939,
110
+ "f1_weighted": 0.6554137228984807
111
+ },
112
+ {
113
+ "accuracy": 0.690113133300541,
114
+ "f1": 0.6762670199218028,
115
+ "f1_weighted": 0.6873424032089852
116
+ },
117
+ {
118
+ "accuracy": 0.6969995081160846,
119
+ "f1": 0.6912939409064918,
120
+ "f1_weighted": 0.6943830899318398
121
+ },
122
+ {
123
+ "accuracy": 0.7166748647319232,
124
+ "f1": 0.7106340402973855,
125
+ "f1_weighted": 0.7168944965565803
126
+ },
127
+ {
128
+ "accuracy": 0.7097884899163798,
129
+ "f1": 0.6996566383588322,
130
+ "f1_weighted": 0.7039633823806679
131
+ }
132
+ ]
133
+ }
134
+ ]
135
+ },
136
+ "task_name": "MassiveScenarioClassification"
137
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/MultiLongDocRetrieval.json ADDED
@@ -0,0 +1,307 @@
1
+ {
2
+ "dataset_revision": "d67138e705d963e346253a80e59676ddb418810a",
3
+ "evaluation_time": 682.6866171360016,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.85",
6
+ "scores": {
7
+ "dev": [
8
+ {
9
+ "hf_subset": "ru",
10
+ "languages": [
11
+ "rus-Cyrl"
12
+ ],
13
+ "main_score": 0.40227,
14
+ "map_at_1": 0.32,
15
+ "map_at_10": 0.37329,
16
+ "map_at_100": 0.37837,
17
+ "map_at_1000": 0.37887,
18
+ "map_at_20": 0.37556,
19
+ "map_at_3": 0.35667,
20
+ "map_at_5": 0.36567,
21
+ "mrr_at_1": 0.32,
22
+ "mrr_at_10": 0.37328968253968253,
23
+ "mrr_at_100": 0.3783692468561393,
24
+ "mrr_at_1000": 0.3788745257343082,
25
+ "mrr_at_20": 0.37555539109583236,
26
+ "mrr_at_3": 0.3566666666666667,
27
+ "mrr_at_5": 0.3656666666666667,
28
+ "nauc_map_at_1000_diff1": 0.5049226885205111,
29
+ "nauc_map_at_1000_max": 0.36898584012716734,
30
+ "nauc_map_at_1000_std": 0.05812550023806558,
31
+ "nauc_map_at_100_diff1": 0.5050697995846534,
32
+ "nauc_map_at_100_max": 0.36919859606753014,
33
+ "nauc_map_at_100_std": 0.05857667392028195,
34
+ "nauc_map_at_10_diff1": 0.504580974701608,
35
+ "nauc_map_at_10_max": 0.36855673273298195,
36
+ "nauc_map_at_10_std": 0.05455656719953692,
37
+ "nauc_map_at_1_diff1": 0.5535226907845445,
38
+ "nauc_map_at_1_max": 0.3434590067430162,
39
+ "nauc_map_at_1_std": 0.021459916515037956,
40
+ "nauc_map_at_20_diff1": 0.5074265457331725,
41
+ "nauc_map_at_20_max": 0.36891426646349595,
42
+ "nauc_map_at_20_std": 0.05719585122911791,
43
+ "nauc_map_at_3_diff1": 0.5016898921886022,
44
+ "nauc_map_at_3_max": 0.35796801142511686,
45
+ "nauc_map_at_3_std": 0.03774056047908409,
46
+ "nauc_map_at_5_diff1": 0.5085848103501994,
47
+ "nauc_map_at_5_max": 0.36701981333184514,
48
+ "nauc_map_at_5_std": 0.05110962480483066,
49
+ "nauc_mrr_at_1000_diff1": 0.5049226885205111,
50
+ "nauc_mrr_at_1000_max": 0.36898584012716734,
51
+ "nauc_mrr_at_1000_std": 0.05812550023806558,
52
+ "nauc_mrr_at_100_diff1": 0.5050697995846534,
53
+ "nauc_mrr_at_100_max": 0.36919859606753014,
54
+ "nauc_mrr_at_100_std": 0.05857667392028195,
55
+ "nauc_mrr_at_10_diff1": 0.504580974701608,
56
+ "nauc_mrr_at_10_max": 0.36855673273298195,
57
+ "nauc_mrr_at_10_std": 0.05455656719953692,
58
+ "nauc_mrr_at_1_diff1": 0.5535226907845445,
59
+ "nauc_mrr_at_1_max": 0.3434590067430162,
60
+ "nauc_mrr_at_1_std": 0.021459916515037956,
61
+ "nauc_mrr_at_20_diff1": 0.5074265457331725,
62
+ "nauc_mrr_at_20_max": 0.36891426646349595,
63
+ "nauc_mrr_at_20_std": 0.05719585122911791,
64
+ "nauc_mrr_at_3_diff1": 0.5016898921886022,
65
+ "nauc_mrr_at_3_max": 0.35796801142511686,
66
+ "nauc_mrr_at_3_std": 0.03774056047908409,
67
+ "nauc_mrr_at_5_diff1": 0.5085848103501994,
68
+ "nauc_mrr_at_5_max": 0.36701981333184514,
69
+ "nauc_mrr_at_5_std": 0.05110962480483066,
70
+ "nauc_ndcg_at_1000_diff1": 0.4785725979552344,
71
+ "nauc_ndcg_at_1000_max": 0.3730847672455184,
72
+ "nauc_ndcg_at_1000_std": 0.08782842195936705,
73
+ "nauc_ndcg_at_100_diff1": 0.4794028113808931,
74
+ "nauc_ndcg_at_100_max": 0.37894267961471706,
75
+ "nauc_ndcg_at_100_std": 0.09036829487119005,
76
+ "nauc_ndcg_at_10_diff1": 0.486234968335003,
77
+ "nauc_ndcg_at_10_max": 0.3786452998953952,
78
+ "nauc_ndcg_at_10_std": 0.07288946637222726,
79
+ "nauc_ndcg_at_1_diff1": 0.5535226907845445,
80
+ "nauc_ndcg_at_1_max": 0.3434590067430162,
81
+ "nauc_ndcg_at_1_std": 0.021459916515037956,
82
+ "nauc_ndcg_at_20_diff1": 0.49465558979522056,
83
+ "nauc_ndcg_at_20_max": 0.37927592187355147,
84
+ "nauc_ndcg_at_20_std": 0.08121216908094145,
85
+ "nauc_ndcg_at_3_diff1": 0.4856656109968867,
86
+ "nauc_ndcg_at_3_max": 0.3609457150341269,
87
+ "nauc_ndcg_at_3_std": 0.04198353986653549,
88
+ "nauc_ndcg_at_5_diff1": 0.49664664337235803,
89
+ "nauc_ndcg_at_5_max": 0.3759266170720623,
90
+ "nauc_ndcg_at_5_std": 0.0659109548482772,
91
+ "nauc_precision_at_1000_diff1": 0.30603844316218937,
92
+ "nauc_precision_at_1000_max": 0.34094111017187934,
93
+ "nauc_precision_at_1000_std": 0.3075085449667387,
94
+ "nauc_precision_at_100_diff1": 0.3627263715273577,
95
+ "nauc_precision_at_100_max": 0.407795159457434,
96
+ "nauc_precision_at_100_std": 0.23050202696712566,
97
+ "nauc_precision_at_10_diff1": 0.42886215358831625,
98
+ "nauc_precision_at_10_max": 0.4087508230083197,
99
+ "nauc_precision_at_10_std": 0.13059728789662342,
100
+ "nauc_precision_at_1_diff1": 0.5535226907845445,
101
+ "nauc_precision_at_1_max": 0.3434590067430162,
102
+ "nauc_precision_at_1_std": 0.021459916515037956,
103
+ "nauc_precision_at_20_diff1": 0.45765562506409596,
104
+ "nauc_precision_at_20_max": 0.4106382251393018,
105
+ "nauc_precision_at_20_std": 0.1629302977472395,
106
+ "nauc_precision_at_3_diff1": 0.4403900617919669,
107
+ "nauc_precision_at_3_max": 0.3688916065911432,
108
+ "nauc_precision_at_3_std": 0.05372682801235828,
109
+ "nauc_precision_at_5_diff1": 0.46322493083468225,
110
+ "nauc_precision_at_5_max": 0.4021375998329596,
111
+ "nauc_precision_at_5_std": 0.11128438690817986,
112
+ "nauc_recall_at_1000_diff1": 0.30603844316218937,
113
+ "nauc_recall_at_1000_max": 0.3409411101718788,
114
+ "nauc_recall_at_1000_std": 0.30750854496673874,
115
+ "nauc_recall_at_100_diff1": 0.36272637152735787,
116
+ "nauc_recall_at_100_max": 0.40779515945743333,
117
+ "nauc_recall_at_100_std": 0.23050202696712532,
118
+ "nauc_recall_at_10_diff1": 0.42886215358831614,
119
+ "nauc_recall_at_10_max": 0.4087508230083197,
120
+ "nauc_recall_at_10_std": 0.13059728789662362,
121
+ "nauc_recall_at_1_diff1": 0.5535226907845445,
122
+ "nauc_recall_at_1_max": 0.3434590067430162,
123
+ "nauc_recall_at_1_std": 0.021459916515037956,
124
+ "nauc_recall_at_20_diff1": 0.45765562506409585,
125
+ "nauc_recall_at_20_max": 0.410638225139302,
126
+ "nauc_recall_at_20_std": 0.16293029774723938,
127
+ "nauc_recall_at_3_diff1": 0.4403900617919668,
128
+ "nauc_recall_at_3_max": 0.3688916065911432,
129
+ "nauc_recall_at_3_std": 0.0537268280123583,
130
+ "nauc_recall_at_5_diff1": 0.46322493083468186,
131
+ "nauc_recall_at_5_max": 0.40213759983295944,
132
+ "nauc_recall_at_5_std": 0.11128438690817982,
133
+ "ndcg_at_1": 0.32,
134
+ "ndcg_at_10": 0.40227,
135
+ "ndcg_at_100": 0.43192,
136
+ "ndcg_at_1000": 0.44965,
137
+ "ndcg_at_20": 0.41009,
138
+ "ndcg_at_3": 0.36786,
139
+ "ndcg_at_5": 0.38421,
140
+ "precision_at_1": 0.32,
141
+ "precision_at_10": 0.0495,
142
+ "precision_at_100": 0.00645,
143
+ "precision_at_1000": 0.0008,
144
+ "precision_at_20": 0.02625,
145
+ "precision_at_3": 0.13333,
146
+ "precision_at_5": 0.088,
147
+ "recall_at_1": 0.32,
148
+ "recall_at_10": 0.495,
149
+ "recall_at_100": 0.645,
150
+ "recall_at_1000": 0.795,
151
+ "recall_at_20": 0.525,
152
+ "recall_at_3": 0.4,
153
+ "recall_at_5": 0.44
154
+ }
155
+ ],
156
+ "test": [
157
+ {
158
+ "hf_subset": "ru",
159
+ "languages": [
160
+ "rus-Cyrl"
161
+ ],
162
+ "main_score": 0.39597,
163
+ "map_at_1": 0.32,
164
+ "map_at_10": 0.36961,
165
+ "map_at_100": 0.37476,
166
+ "map_at_1000": 0.37544,
167
+ "map_at_20": 0.37219,
168
+ "map_at_3": 0.35583,
169
+ "map_at_5": 0.36283,
170
+ "mrr_at_1": 0.32,
171
+ "mrr_at_10": 0.369609126984127,
172
+ "mrr_at_100": 0.3747603702111978,
173
+ "mrr_at_1000": 0.37544493858483885,
174
+ "mrr_at_20": 0.3721923323709315,
175
+ "mrr_at_3": 0.35583333333333333,
176
+ "mrr_at_5": 0.36283333333333334,
177
+ "nauc_map_at_1000_diff1": 0.6072991632213791,
178
+ "nauc_map_at_1000_max": 0.3928234347429879,
179
+ "nauc_map_at_1000_std": -0.1208335699816264,
180
+ "nauc_map_at_100_diff1": 0.6068885691575602,
181
+ "nauc_map_at_100_max": 0.392393132461218,
182
+ "nauc_map_at_100_std": -0.12148831949686865,
183
+ "nauc_map_at_10_diff1": 0.6074141811996672,
184
+ "nauc_map_at_10_max": 0.3896717802712305,
185
+ "nauc_map_at_10_std": -0.1272811317792866,
186
+ "nauc_map_at_1_diff1": 0.6389141603339398,
187
+ "nauc_map_at_1_max": 0.3825390666809377,
188
+ "nauc_map_at_1_std": -0.14395135395483255,
189
+ "nauc_map_at_20_diff1": 0.6078995640092393,
190
+ "nauc_map_at_20_max": 0.3914162256518424,
191
+ "nauc_map_at_20_std": -0.12555944651132878,
192
+ "nauc_map_at_3_diff1": 0.622238851594539,
193
+ "nauc_map_at_3_max": 0.39202637423318953,
194
+ "nauc_map_at_3_std": -0.1288910266032218,
195
+ "nauc_map_at_5_diff1": 0.6118886571968496,
196
+ "nauc_map_at_5_max": 0.39340623688220455,
197
+ "nauc_map_at_5_std": -0.12631578831758125,
198
+ "nauc_mrr_at_1000_diff1": 0.6072991632213791,
199
+ "nauc_mrr_at_1000_max": 0.3928234347429879,
200
+ "nauc_mrr_at_1000_std": -0.1208335699816264,
201
+ "nauc_mrr_at_100_diff1": 0.6068885691575602,
202
+ "nauc_mrr_at_100_max": 0.392393132461218,
203
+ "nauc_mrr_at_100_std": -0.12148831949686865,
204
+ "nauc_mrr_at_10_diff1": 0.6074141811996672,
205
+ "nauc_mrr_at_10_max": 0.3896717802712305,
206
+ "nauc_mrr_at_10_std": -0.1272811317792866,
207
+ "nauc_mrr_at_1_diff1": 0.6389141603339398,
208
+ "nauc_mrr_at_1_max": 0.3825390666809377,
209
+ "nauc_mrr_at_1_std": -0.14395135395483255,
210
+ "nauc_mrr_at_20_diff1": 0.6078995640092393,
211
+ "nauc_mrr_at_20_max": 0.3914162256518424,
212
+ "nauc_mrr_at_20_std": -0.12555944651132878,
213
+ "nauc_mrr_at_3_diff1": 0.622238851594539,
214
+ "nauc_mrr_at_3_max": 0.39202637423318953,
215
+ "nauc_mrr_at_3_std": -0.1288910266032218,
216
+ "nauc_mrr_at_5_diff1": 0.6118886571968496,
217
+ "nauc_mrr_at_5_max": 0.39340623688220455,
218
+ "nauc_mrr_at_5_std": -0.12631578831758125,
219
+ "nauc_ndcg_at_1000_diff1": 0.5851514237533868,
220
+ "nauc_ndcg_at_1000_max": 0.40321877658211946,
221
+ "nauc_ndcg_at_1000_std": -0.07888707791585431,
222
+ "nauc_ndcg_at_100_diff1": 0.5753852071087543,
223
+ "nauc_ndcg_at_100_max": 0.3952733246055713,
224
+ "nauc_ndcg_at_100_std": -0.09195921146297029,
225
+ "nauc_ndcg_at_10_diff1": 0.5846360822539703,
226
+ "nauc_ndcg_at_10_max": 0.385480545231369,
227
+ "nauc_ndcg_at_10_std": -0.12232779727556409,
228
+ "nauc_ndcg_at_1_diff1": 0.6389141603339398,
229
+ "nauc_ndcg_at_1_max": 0.3825390666809377,
230
+ "nauc_ndcg_at_1_std": -0.14395135395483255,
231
+ "nauc_ndcg_at_20_diff1": 0.585435237376976,
232
+ "nauc_ndcg_at_20_max": 0.3907943413863654,
233
+ "nauc_ndcg_at_20_std": -0.11544828924932488,
234
+ "nauc_ndcg_at_3_diff1": 0.6143679551409716,
235
+ "nauc_ndcg_at_3_max": 0.3916154005848283,
236
+ "nauc_ndcg_at_3_std": -0.12542875187651797,
237
+ "nauc_ndcg_at_5_diff1": 0.596068161862162,
238
+ "nauc_ndcg_at_5_max": 0.39430175938144923,
239
+ "nauc_ndcg_at_5_std": -0.1204065311095627,
240
+ "nauc_precision_at_1000_diff1": 0.4632330083368667,
241
+ "nauc_precision_at_1000_max": 0.5297866327539918,
242
+ "nauc_precision_at_1000_std": 0.3525787763176489,
243
+ "nauc_precision_at_100_diff1": 0.434421438021106,
244
+ "nauc_precision_at_100_max": 0.40953911946589605,
245
+ "nauc_precision_at_100_std": 0.05357966956896357,
246
+ "nauc_precision_at_10_diff1": 0.510525828305282,
247
+ "nauc_precision_at_10_max": 0.36958822906742883,
248
+ "nauc_precision_at_10_std": -0.10775246109876159,
249
+ "nauc_precision_at_1_diff1": 0.6389141603339398,
250
+ "nauc_precision_at_1_max": 0.3825390666809377,
251
+ "nauc_precision_at_1_std": -0.14395135395483255,
252
+ "nauc_precision_at_20_diff1": 0.5099298111975622,
253
+ "nauc_precision_at_20_max": 0.3886092823330972,
254
+ "nauc_precision_at_20_std": -0.07844550846074344,
255
+ "nauc_precision_at_3_diff1": 0.5912976313079299,
256
+ "nauc_precision_at_3_max": 0.3894181256436662,
257
+ "nauc_precision_at_3_std": -0.11602085478887753,
258
+ "nauc_precision_at_5_diff1": 0.5477884277743821,
259
+ "nauc_precision_at_5_max": 0.39633377118258356,
260
+ "nauc_precision_at_5_std": -0.10297694138455701,
261
+ "nauc_recall_at_1000_diff1": 0.46323300833686604,
262
+ "nauc_recall_at_1000_max": 0.5297866327539922,
263
+ "nauc_recall_at_1000_std": 0.35257877631764845,
264
+ "nauc_recall_at_100_diff1": 0.4344214380211059,
265
+ "nauc_recall_at_100_max": 0.40953911946589516,
266
+ "nauc_recall_at_100_std": 0.05357966956896275,
267
+ "nauc_recall_at_10_diff1": 0.510525828305282,
268
+ "nauc_recall_at_10_max": 0.369588229067429,
269
+ "nauc_recall_at_10_std": -0.10775246109876134,
270
+ "nauc_recall_at_1_diff1": 0.6389141603339398,
271
+ "nauc_recall_at_1_max": 0.3825390666809377,
272
+ "nauc_recall_at_1_std": -0.14395135395483255,
273
+ "nauc_recall_at_20_diff1": 0.5099298111975623,
274
+ "nauc_recall_at_20_max": 0.38860928233309794,
275
+ "nauc_recall_at_20_std": -0.0784455084607433,
276
+ "nauc_recall_at_3_diff1": 0.5912976313079299,
277
+ "nauc_recall_at_3_max": 0.3894181256436663,
278
+ "nauc_recall_at_3_std": -0.11602085478887743,
279
+ "nauc_recall_at_5_diff1": 0.5477884277743819,
280
+ "nauc_recall_at_5_max": 0.396333771182583,
281
+ "nauc_recall_at_5_std": -0.10297694138455747,
282
+ "ndcg_at_1": 0.32,
283
+ "ndcg_at_10": 0.39597,
284
+ "ndcg_at_100": 0.42451,
285
+ "ndcg_at_1000": 0.44875,
286
+ "ndcg_at_20": 0.40584,
287
+ "ndcg_at_3": 0.3672,
288
+ "ndcg_at_5": 0.37968,
289
+ "precision_at_1": 0.32,
290
+ "precision_at_10": 0.048,
291
+ "precision_at_100": 0.0062,
292
+ "precision_at_1000": 0.00083,
293
+ "precision_at_20": 0.026,
294
+ "precision_at_3": 0.13333,
295
+ "precision_at_5": 0.086,
296
+ "recall_at_1": 0.32,
297
+ "recall_at_10": 0.48,
298
+ "recall_at_100": 0.62,
299
+ "recall_at_1000": 0.825,
300
+ "recall_at_20": 0.52,
301
+ "recall_at_3": 0.4,
302
+ "recall_at_5": 0.43
303
+ }
304
+ ]
305
+ },
306
+ "task_name": "MultiLongDocRetrieval"
307
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/MultilingualSentimentClassification.json ADDED
@@ -0,0 +1,95 @@
1
+ {
2
+ "dataset_revision": "2b9b4d10fc589af67794141fe8cbd3739de1eb33",
3
+ "evaluation_time": 13.458463668823242,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.85",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.7856978085351788,
10
+ "ap": 0.84493198223752,
11
+ "ap_weighted": 0.84493198223752,
12
+ "f1": 0.7709719452619935,
13
+ "f1_weighted": 0.7898884944996265,
14
+ "hf_subset": "rus",
15
+ "languages": [
16
+ "rus-Cyrl"
17
+ ],
18
+ "main_score": 0.7856978085351788,
19
+ "scores_per_experiment": [
20
+ {
21
+ "accuracy": 0.8027681660899654,
22
+ "ap": 0.8203433563602925,
23
+ "ap_weighted": 0.8203433563602925,
24
+ "f1": 0.7729925140829867,
25
+ "f1_weighted": 0.801156108497231
26
+ },
27
+ {
28
+ "accuracy": 0.7670126874279123,
29
+ "ap": 0.842266384662846,
30
+ "ap_weighted": 0.842266384662846,
31
+ "f1": 0.7561487050960736,
32
+ "f1_weighted": 0.7737804141264348
33
+ },
34
+ {
35
+ "accuracy": 0.8096885813148789,
36
+ "ap": 0.8754460993523581,
37
+ "ap_weighted": 0.8754460993523581,
38
+ "f1": 0.799874095058231,
39
+ "f1_weighted": 0.8150558784864832
40
+ },
41
+ {
42
+ "accuracy": 0.825836216839677,
43
+ "ap": 0.8851844163423996,
44
+ "ap_weighted": 0.8851844163423996,
45
+ "f1": 0.8158394114378557,
46
+ "f1_weighted": 0.8305376847266723
47
+ },
48
+ {
49
+ "accuracy": 0.698961937716263,
50
+ "ap": 0.8167281309405517,
51
+ "ap_weighted": 0.8167281309405517,
52
+ "f1": 0.6942170872605655,
53
+ "f1_weighted": 0.7072654260137335
54
+ },
55
+ {
56
+ "accuracy": 0.8027681660899654,
57
+ "ap": 0.826744948309937,
58
+ "ap_weighted": 0.826744948309937,
59
+ "f1": 0.7767459300408238,
60
+ "f1_weighted": 0.8028560790495909
61
+ },
62
+ {
63
+ "accuracy": 0.7843137254901961,
64
+ "ap": 0.8728923478664239,
65
+ "ap_weighted": 0.8728923478664239,
66
+ "f1": 0.7776584671463778,
67
+ "f1_weighted": 0.7908358786671381
68
+ },
69
+ {
70
+ "accuracy": 0.6955017301038062,
71
+ "ap": 0.7906763410001297,
72
+ "ap_weighted": 0.7906763410001297,
73
+ "f1": 0.6837689969604863,
74
+ "f1_weighted": 0.7046349954249534
75
+ },
76
+ {
77
+ "accuracy": 0.8189158016147635,
78
+ "ap": 0.8486374088946167,
79
+ "ap_weighted": 0.8486374088946167,
80
+ "f1": 0.7991471215351813,
81
+ "f1_weighted": 0.820732775886784
82
+ },
83
+ {
84
+ "accuracy": 0.8512110726643599,
85
+ "ap": 0.870400388645644,
86
+ "ap_weighted": 0.870400388645644,
87
+ "f1": 0.8333271240013531,
88
+ "f1_weighted": 0.852029704117244
89
+ }
90
+ ]
91
+ }
92
+ ]
93
+ },
94
+ "task_name": "MultilingualSentimentClassification"
95
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/NTREXBitextMining.json ADDED
@@ -0,0 +1,899 @@
1
+ {
2
+ "dataset_revision": "ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33",
3
+ "evaluation_time": 80.0495195388794,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.85",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.06509764646970456,
10
+ "f1": 0.04880756202406125,
11
+ "hf_subset": "arb_Arab-rus_Cyrl",
12
+ "languages": [
13
+ "arb-Arab",
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.04880756202406125,
17
+ "precision": 0.04443315384313043,
18
+ "recall": 0.06509764646970456
19
+ },
20
+ {
21
+ "accuracy": 0.8407611417125689,
22
+ "f1": 0.8067246832982392,
23
+ "hf_subset": "bel_Cyrl-rus_Cyrl",
24
+ "languages": [
25
+ "bel-Cyrl",
26
+ "rus-Cyrl"
27
+ ],
28
+ "main_score": 0.8067246832982392,
29
+ "precision": 0.7928289259285755,
30
+ "recall": 0.8407611417125689
31
+ },
32
+ {
33
+ "accuracy": 0.04406609914872309,
34
+ "f1": 0.03200583539750235,
35
+ "hf_subset": "ben_Beng-rus_Cyrl",
36
+ "languages": [
37
+ "ben-Beng",
38
+ "rus-Cyrl"
39
+ ],
40
+ "main_score": 0.03200583539750235,
41
+ "precision": 0.02928169556766814,
42
+ "recall": 0.04406609914872309
43
+ },
44
+ {
45
+ "accuracy": 0.5948923385077617,
46
+ "f1": 0.5494027353507088,
47
+ "hf_subset": "bos_Latn-rus_Cyrl",
48
+ "languages": [
49
+ "bos-Latn",
50
+ "rus-Cyrl"
51
+ ],
52
+ "main_score": 0.5494027353507088,
53
+ "precision": 0.5350027748191425,
54
+ "recall": 0.5948923385077617
55
+ },
56
+ {
57
+ "accuracy": 0.9459188783174762,
58
+ "f1": 0.9311999134232484,
59
+ "hf_subset": "bul_Cyrl-rus_Cyrl",
60
+ "languages": [
61
+ "bul-Cyrl",
62
+ "rus-Cyrl"
63
+ ],
64
+ "main_score": 0.9311999134232484,
65
+ "precision": 0.9246953763979302,
66
+ "recall": 0.9459188783174762
67
+ },
68
+ {
69
+ "accuracy": 0.5818728092138208,
70
+ "f1": 0.5338965487515986,
71
+ "hf_subset": "ces_Latn-rus_Cyrl",
72
+ "languages": [
73
+ "ces-Latn",
74
+ "rus-Cyrl"
75
+ ],
76
+ "main_score": 0.5338965487515986,
77
+ "precision": 0.5176792671609898,
78
+ "recall": 0.5818728092138208
79
+ },
80
+ {
81
+ "accuracy": 0.7080620931397096,
82
+ "f1": 0.6657826128771475,
83
+ "hf_subset": "deu_Latn-rus_Cyrl",
84
+ "languages": [
85
+ "deu-Latn",
86
+ "rus-Cyrl"
87
+ ],
88
+ "main_score": 0.6657826128771475,
89
+ "precision": 0.6508054856129935,
90
+ "recall": 0.7080620931397096
91
+ },
92
+ {
93
+ "accuracy": 0.14071106659989985,
94
+ "f1": 0.11328410374269476,
95
+ "hf_subset": "ell_Grek-rus_Cyrl",
96
+ "languages": [
97
+ "ell-Grek",
98
+ "rus-Cyrl"
99
+ ],
100
+ "main_score": 0.11328410374269476,
101
+ "precision": 0.1049933844547164,
102
+ "recall": 0.14071106659989985
103
+ },
104
+ {
105
+ "accuracy": 0.9889834752128193,
106
+ "f1": 0.985478217325989,
107
+ "hf_subset": "eng_Latn-rus_Cyrl",
108
+ "languages": [
109
+ "eng-Latn",
110
+ "rus-Cyrl"
111
+ ],
112
+ "main_score": 0.985478217325989,
113
+ "precision": 0.9838090469036889,
114
+ "recall": 0.9889834752128193
115
+ },
116
+ {
117
+ "accuracy": 0.16675012518778168,
118
+ "f1": 0.14594663676795921,
119
+ "hf_subset": "fas_Arab-rus_Cyrl",
120
+ "languages": [
121
+ "fas-Arab",
122
+ "rus-Cyrl"
123
+ ],
124
+ "main_score": 0.14594663676795921,
125
+ "precision": 0.13958699801839514,
126
+ "recall": 0.16675012518778168
127
+ },
128
+ {
129
+ "accuracy": 0.4496745117676515,
130
+ "f1": 0.4032040114608448,
131
+ "hf_subset": "fin_Latn-rus_Cyrl",
132
+ "languages": [
133
+ "fin-Latn",
134
+ "rus-Cyrl"
135
+ ],
136
+ "main_score": 0.4032040114608448,
137
+ "precision": 0.38897691534756734,
138
+ "recall": 0.4496745117676515
139
+ },
140
+ {
141
+ "accuracy": 0.8372558838257386,
142
+ "f1": 0.801232231369829,
143
+ "hf_subset": "fra_Latn-rus_Cyrl",
144
+ "languages": [
145
+ "fra-Latn",
146
+ "rus-Cyrl"
147
+ ],
148
+ "main_score": 0.801232231369829,
149
+ "precision": 0.7869491827929485,
150
+ "recall": 0.8372558838257386
151
+ },
152
+ {
153
+ "accuracy": 0.10866299449173761,
154
+ "f1": 0.08442292827544921,
155
+ "hf_subset": "heb_Hebr-rus_Cyrl",
156
+ "languages": [
157
+ "heb-Hebr",
158
+ "rus-Cyrl"
159
+ ],
160
+ "main_score": 0.08442292827544921,
161
+ "precision": 0.07803833679190662,
162
+ "recall": 0.10866299449173761
163
+ },
164
+ {
165
+ "accuracy": 0.09514271407110667,
166
+ "f1": 0.072895686673908,
167
+ "hf_subset": "hin_Deva-rus_Cyrl",
168
+ "languages": [
169
+ "hin-Deva",
170
+ "rus-Cyrl"
171
+ ],
172
+ "main_score": 0.072895686673908,
173
+ "precision": 0.06724897827340359,
174
+ "recall": 0.09514271407110667
175
+ },
176
+ {
177
+ "accuracy": 0.5878818227341012,
178
+ "f1": 0.542103595569139,
179
+ "hf_subset": "hrv_Latn-rus_Cyrl",
180
+ "languages": [
181
+ "hrv-Latn",
182
+ "rus-Cyrl"
183
+ ],
184
+ "main_score": 0.542103595569139,
185
+ "precision": 0.5278617355359898,
186
+ "recall": 0.5878818227341012
187
+ },
188
+ {
189
+ "accuracy": 0.4101151727591387,
190
+ "f1": 0.36279575750378756,
191
+ "hf_subset": "hun_Latn-rus_Cyrl",
192
+ "languages": [
193
+ "hun-Latn",
194
+ "rus-Cyrl"
195
+ ],
196
+ "main_score": 0.36279575750378756,
197
+ "precision": 0.3484854642082763,
198
+ "recall": 0.4101151727591387
199
+ },
200
+ {
201
+ "accuracy": 0.4246369554331497,
202
+ "f1": 0.3773113391337017,
203
+ "hf_subset": "ind_Latn-rus_Cyrl",
204
+ "languages": [
205
+ "ind-Latn",
206
+ "rus-Cyrl"
207
+ ],
208
+ "main_score": 0.3773113391337017,
209
+ "precision": 0.36257733461770353,
210
+ "recall": 0.4246369554331497
211
+ },
212
+ {
213
+ "accuracy": 0.05107661492238358,
214
+ "f1": 0.03760645074434319,
215
+ "hf_subset": "jpn_Jpan-rus_Cyrl",
216
+ "languages": [
217
+ "jpn-Jpan",
218
+ "rus-Cyrl"
219
+ ],
220
+ "main_score": 0.03760645074434319,
221
+ "precision": 0.03413473652579605,
222
+ "recall": 0.05107661492238358
223
+ },
224
+ {
225
+ "accuracy": 0.09864797195793691,
226
+ "f1": 0.08800453936149924,
227
+ "hf_subset": "kor_Hang-rus_Cyrl",
228
+ "languages": [
229
+ "kor-Hang",
230
+ "rus-Cyrl"
231
+ ],
232
+ "main_score": 0.08800453936149924,
233
+ "precision": 0.08482217245326998,
234
+ "recall": 0.09864797195793691
235
+ },
236
+ {
237
+ "accuracy": 0.39959939909864794,
238
+ "f1": 0.3555417630690983,
239
+ "hf_subset": "lit_Latn-rus_Cyrl",
240
+ "languages": [
241
+ "lit-Latn",
242
+ "rus-Cyrl"
243
+ ],
244
+ "main_score": 0.3555417630690983,
245
+ "precision": 0.34307119091259697,
246
+ "recall": 0.39959939909864794
247
+ },
248
+ {
249
+ "accuracy": 0.9083625438157236,
250
+ "f1": 0.8867373641106822,
251
+ "hf_subset": "mkd_Cyrl-rus_Cyrl",
252
+ "languages": [
253
+ "mkd-Cyrl",
254
+ "rus-Cyrl"
255
+ ],
256
+ "main_score": 0.8867373641106822,
257
+ "precision": 0.8772122469418414,
258
+ "recall": 0.9083625438157236
259
+ },
260
+ {
261
+ "accuracy": 0.6634952428642964,
262
+ "f1": 0.6214956852654588,
263
+ "hf_subset": "nld_Latn-rus_Cyrl",
264
+ "languages": [
265
+ "nld-Latn",
266
+ "rus-Cyrl"
267
+ ],
268
+ "main_score": 0.6214956852654588,
269
+ "precision": 0.6069089305245047,
270
+ "recall": 0.6634952428642964
271
+ },
272
+ {
273
+ "accuracy": 0.5187781672508763,
274
+ "f1": 0.4684380759007405,
275
+ "hf_subset": "pol_Latn-rus_Cyrl",
276
+ "languages": [
277
+ "pol-Latn",
278
+ "rus-Cyrl"
279
+ ],
280
+ "main_score": 0.4684380759007405,
281
+ "precision": 0.4526550519986278,
282
+ "recall": 0.5187781672508763
283
+ },
284
+ {
285
+ "accuracy": 0.7265898848272409,
286
+ "f1": 0.6801916869442072,
287
+ "hf_subset": "por_Latn-rus_Cyrl",
288
+ "languages": [
289
+ "por-Latn",
290
+ "rus-Cyrl"
291
+ ],
292
+ "main_score": 0.6801916869442072,
293
+ "precision": 0.6641653783954696,
294
+ "recall": 0.7265898848272409
295
+ },
296
+ {
297
+ "accuracy": 0.10866299449173761,
298
+ "f1": 0.07949908162723732,
299
+ "hf_subset": "rus_Cyrl-arb_Arab",
300
+ "languages": [
301
+ "rus-Cyrl",
302
+ "arb-Arab"
303
+ ],
304
+ "main_score": 0.07949908162723732,
305
+ "precision": 0.07341199421837027,
306
+ "recall": 0.10866299449173761
307
+ },
308
+ {
309
+ "accuracy": 0.8612919379068603,
310
+ "f1": 0.824514549602181,
311
+ "hf_subset": "rus_Cyrl-bel_Cyrl",
312
+ "languages": [
313
+ "rus-Cyrl",
314
+ "bel-Cyrl"
315
+ ],
316
+ "main_score": 0.824514549602181,
317
+ "precision": 0.8081330328826574,
318
+ "recall": 0.8612919379068603
319
+ },
320
+ {
321
+ "accuracy": 0.07260891337005508,
322
+ "f1": 0.04594056511054593,
323
+ "hf_subset": "rus_Cyrl-ben_Beng",
324
+ "languages": [
325
+ "rus-Cyrl",
326
+ "ben-Beng"
327
+ ],
328
+ "main_score": 0.04594056511054593,
329
+ "precision": 0.039606351012165496,
330
+ "recall": 0.07260891337005508
331
+ },
332
+ {
333
+ "accuracy": 0.7095643465197796,
334
+ "f1": 0.6513492460913592,
335
+ "hf_subset": "rus_Cyrl-bos_Latn",
336
+ "languages": [
337
+ "rus-Cyrl",
338
+ "bos-Latn"
339
+ ],
340
+ "main_score": 0.6513492460913592,
341
+ "precision": 0.6283437060352433,
342
+ "recall": 0.7095643465197796
343
+ },
344
+ {
345
+ "accuracy": 0.9534301452178268,
346
+ "f1": 0.9389083625438157,
347
+ "hf_subset": "rus_Cyrl-bul_Cyrl",
348
+ "languages": [
349
+ "rus-Cyrl",
350
+ "bul-Cyrl"
351
+ ],
352
+ "main_score": 0.9389083625438157,
353
+ "precision": 0.9319813052912703,
354
+ "recall": 0.9534301452178268
355
+ },
356
+ {
357
+ "accuracy": 0.7020530796194292,
358
+ "f1": 0.6454073968095,
359
+ "hf_subset": "rus_Cyrl-ces_Latn",
360
+ "languages": [
361
+ "rus-Cyrl",
362
+ "ces-Latn"
363
+ ],
364
+ "main_score": 0.6454073968095,
365
+ "precision": 0.6233175159564744,
366
+ "recall": 0.7020530796194292
367
+ },
368
+ {
369
+ "accuracy": 0.7936905358037055,
370
+ "f1": 0.7430932112454396,
371
+ "hf_subset": "rus_Cyrl-deu_Latn",
372
+ "languages": [
373
+ "rus-Cyrl",
374
+ "deu-Latn"
375
+ ],
376
+ "main_score": 0.7430932112454396,
377
+ "precision": 0.7217004077544888,
378
+ "recall": 0.7936905358037055
379
+ },
380
+ {
381
+ "accuracy": 0.18627941912869303,
382
+ "f1": 0.12991244624904133,
383
+ "hf_subset": "rus_Cyrl-ell_Grek",
384
+ "languages": [
385
+ "rus-Cyrl",
386
+ "ell-Grek"
387
+ ],
388
+ "main_score": 0.12991244624904133,
389
+ "precision": 0.11639775984418817,
390
+ "recall": 0.18627941912869303
391
+ },
392
+ {
393
+ "accuracy": 0.9879819729594391,
394
+ "f1": 0.9840594224670338,
395
+ "hf_subset": "rus_Cyrl-eng_Latn",
396
+ "languages": [
397
+ "rus-Cyrl",
398
+ "eng-Latn"
399
+ ],
400
+ "main_score": 0.9840594224670338,
401
+ "precision": 0.9821398764813887,
402
+ "recall": 0.9879819729594391
403
+ },
404
+ {
405
+ "accuracy": 0.21482223335002504,
406
+ "f1": 0.14683366295497755,
407
+ "hf_subset": "rus_Cyrl-fas_Arab",
408
+ "languages": [
409
+ "rus-Cyrl",
410
+ "fas-Arab"
411
+ ],
412
+ "main_score": 0.14683366295497755,
413
+ "precision": 0.13017372818657788,
414
+ "recall": 0.21482223335002504
415
+ },
416
+ {
417
+ "accuracy": 0.57135703555333,
418
+ "f1": 0.5048852138236215,
419
+ "hf_subset": "rus_Cyrl-fin_Latn",
420
+ "languages": [
421
+ "rus-Cyrl",
422
+ "fin-Latn"
423
+ ],
424
+ "main_score": 0.5048852138236215,
425
+ "precision": 0.481152253410641,
426
+ "recall": 0.57135703555333
427
+ },
428
+ {
429
+ "accuracy": 0.8693039559339009,
430
+ "f1": 0.8344278322245272,
431
+ "hf_subset": "rus_Cyrl-fra_Latn",
432
+ "languages": [
433
+ "rus-Cyrl",
434
+ "fra-Latn"
435
+ ],
436
+ "main_score": 0.8344278322245272,
437
+ "precision": 0.8188282423635452,
438
+ "recall": 0.8693039559339009
439
+ },
440
+ {
441
+ "accuracy": 0.1517275913870806,
442
+ "f1": 0.1048430019294976,
443
+ "hf_subset": "rus_Cyrl-heb_Hebr",
444
+ "languages": [
445
+ "rus-Cyrl",
446
+ "heb-Hebr"
447
+ ],
448
+ "main_score": 0.1048430019294976,
449
+ "precision": 0.09526529695425623,
450
+ "recall": 0.1517275913870806
451
+ },
452
+ {
453
+ "accuracy": 0.1357035553329995,
454
+ "f1": 0.09464590953613519,
455
+ "hf_subset": "rus_Cyrl-hin_Deva",
456
+ "languages": [
457
+ "rus-Cyrl",
458
+ "hin-Deva"
459
+ ],
460
+ "main_score": 0.09464590953613519,
461
+ "precision": 0.08564597115487124,
462
+ "recall": 0.1357035553329995
463
+ },
464
+ {
465
+ "accuracy": 0.7040560841261893,
466
+ "f1": 0.6431739962035405,
467
+ "hf_subset": "rus_Cyrl-hrv_Latn",
468
+ "languages": [
469
+ "rus-Cyrl",
470
+ "hrv-Latn"
471
+ ],
472
+ "main_score": 0.6431739962035405,
473
+ "precision": 0.6198138477557606,
474
+ "recall": 0.7040560841261893
475
+ },
476
+ {
477
+ "accuracy": 0.5533299949924887,
478
+ "f1": 0.48702382578196296,
479
+ "hf_subset": "rus_Cyrl-hun_Latn",
480
+ "languages": [
481
+ "rus-Cyrl",
482
+ "hun-Latn"
483
+ ],
484
+ "main_score": 0.48702382578196296,
485
+ "precision": 0.4631310421620887,
486
+ "recall": 0.5533299949924887
487
+ },
488
+ {
489
+ "accuracy": 0.5037556334501753,
490
+ "f1": 0.4233186903234814,
491
+ "hf_subset": "rus_Cyrl-ind_Latn",
492
+ "languages": [
493
+ "rus-Cyrl",
494
+ "ind-Latn"
495
+ ],
496
+ "main_score": 0.4233186903234814,
497
+ "precision": 0.39709252037118836,
498
+ "recall": 0.5037556334501753
499
+ },
500
+ {
501
+ "accuracy": 0.0786179268903355,
502
+ "f1": 0.046268356932756646,
503
+ "hf_subset": "rus_Cyrl-jpn_Jpan",
504
+ "languages": [
505
+ "rus-Cyrl",
506
+ "jpn-Jpan"
507
+ ],
508
+ "main_score": 0.046268356932756646,
509
+ "precision": 0.040014051004519484,
510
+ "recall": 0.0786179268903355
511
+ },
512
+ {
513
+ "accuracy": 0.1397095643465198,
514
+ "f1": 0.077650317712593,
515
+ "hf_subset": "rus_Cyrl-kor_Hang",
516
+ "languages": [
517
+ "rus-Cyrl",
518
+ "kor-Hang"
519
+ ],
520
+ "main_score": 0.077650317712593,
521
+ "precision": 0.0643752438767403,
522
+ "recall": 0.1397095643465198
523
+ },
524
+ {
525
+ "accuracy": 0.542313470205308,
526
+ "f1": 0.4707925441275466,
527
+ "hf_subset": "rus_Cyrl-lit_Latn",
528
+ "languages": [
529
+ "rus-Cyrl",
530
+ "lit-Latn"
531
+ ],
532
+ "main_score": 0.4707925441275466,
533
+ "precision": 0.44534520735351396,
534
+ "recall": 0.542313470205308
535
+ },
536
+ {
537
+ "accuracy": 0.9298948422633951,
538
+ "f1": 0.9098481054915707,
539
+ "hf_subset": "rus_Cyrl-mkd_Cyrl",
540
+ "languages": [
541
+ "rus-Cyrl",
542
+ "mkd-Cyrl"
543
+ ],
544
+ "main_score": 0.9098481054915707,
545
+ "precision": 0.900350525788683,
546
+ "recall": 0.9298948422633951
547
+ },
548
+ {
549
+ "accuracy": 0.757636454682023,
550
+ "f1": 0.7043410714917976,
551
+ "hf_subset": "rus_Cyrl-nld_Latn",
552
+ "languages": [
553
+ "rus-Cyrl",
554
+ "nld-Latn"
555
+ ],
556
+ "main_score": 0.7043410714917976,
557
+ "precision": 0.6830801758192844,
558
+ "recall": 0.757636454682023
559
+ },
560
+ {
561
+ "accuracy": 0.6459689534301453,
562
+ "f1": 0.582310345304837,
563
+ "hf_subset": "rus_Cyrl-pol_Latn",
564
+ "languages": [
565
+ "rus-Cyrl",
566
+ "pol-Latn"
567
+ ],
568
+ "main_score": 0.582310345304837,
569
+ "precision": 0.5580869319852795,
570
+ "recall": 0.6459689534301453
571
+ },
572
+ {
573
+ "accuracy": 0.7951927891837757,
574
+ "f1": 0.7500846507857024,
575
+ "hf_subset": "rus_Cyrl-por_Latn",
576
+ "languages": [
577
+ "rus-Cyrl",
578
+ "por-Latn"
579
+ ],
580
+ "main_score": 0.7500846507857024,
581
+ "precision": 0.7308852167139598,
582
+ "recall": 0.7951927891837757
583
+ },
584
+ {
585
+ "accuracy": 0.7030545818728092,
586
+ "f1": 0.6459470952460438,
587
+ "hf_subset": "rus_Cyrl-slk_Latn",
588
+ "languages": [
589
+ "rus-Cyrl",
590
+ "slk-Latn"
591
+ ],
592
+ "main_score": 0.6459470952460438,
593
+ "precision": 0.6238902003799349,
594
+ "recall": 0.7030545818728092
595
+ },
596
+ {
597
+ "accuracy": 0.6730095142714071,
598
+ "f1": 0.6090814793618999,
599
+ "hf_subset": "rus_Cyrl-slv_Latn",
600
+ "languages": [
601
+ "rus-Cyrl",
602
+ "slv-Latn"
603
+ ],
604
+ "main_score": 0.6090814793618999,
605
+ "precision": 0.5845504548308755,
606
+ "recall": 0.6730095142714071
607
+ },
608
+ {
609
+ "accuracy": 0.8462694041061593,
610
+ "f1": 0.8070097570598322,
611
+ "hf_subset": "rus_Cyrl-spa_Latn",
612
+ "languages": [
613
+ "rus-Cyrl",
614
+ "spa-Latn"
615
+ ],
616
+ "main_score": 0.8070097570598322,
617
+ "precision": 0.7902039567287439,
618
+ "recall": 0.8462694041061593
619
+ },
620
+ {
621
+ "accuracy": 0.8217325988983475,
622
+ "f1": 0.7775258125283163,
623
+ "hf_subset": "rus_Cyrl-srp_Cyrl",
624
+ "languages": [
625
+ "rus-Cyrl",
626
+ "srp-Cyrl"
627
+ ],
628
+ "main_score": 0.7775258125283163,
629
+ "precision": 0.7582039726256051,
630
+ "recall": 0.8217325988983475
631
+ },
632
+ {
633
+ "accuracy": 0.7025538307461192,
634
+ "f1": 0.6432524146970817,
635
+ "hf_subset": "rus_Cyrl-srp_Latn",
636
+ "languages": [
637
+ "rus-Cyrl",
638
+ "srp-Latn"
639
+ ],
640
+ "main_score": 0.6432524146970817,
641
+ "precision": 0.6204707855433944,
642
+ "recall": 0.7025538307461192
643
+ },
644
+ {
645
+ "accuracy": 0.4101151727591387,
646
+ "f1": 0.32719321552726827,
647
+ "hf_subset": "rus_Cyrl-swa_Latn",
648
+ "languages": [
649
+ "rus-Cyrl",
650
+ "swa-Latn"
651
+ ],
652
+ "main_score": 0.32719321552726827,
653
+ "precision": 0.302612403380099,
654
+ "recall": 0.4101151727591387
655
+ },
656
+ {
657
+ "accuracy": 0.7836755132699048,
658
+ "f1": 0.7359710200220967,
659
+ "hf_subset": "rus_Cyrl-swe_Latn",
660
+ "languages": [
661
+ "rus-Cyrl",
662
+ "swe-Latn"
663
+ ],
664
+ "main_score": 0.7359710200220967,
665
+ "precision": 0.7164399071134174,
666
+ "recall": 0.7836755132699048
667
+ },
668
+ {
669
+ "accuracy": 0.1402103154732098,
670
+ "f1": 0.09658469154847996,
671
+ "hf_subset": "rus_Cyrl-tam_Taml",
672
+ "languages": [
673
+ "rus-Cyrl",
674
+ "tam-Taml"
675
+ ],
676
+ "main_score": 0.09658469154847996,
677
+ "precision": 0.08759865954495558,
678
+ "recall": 0.1402103154732098
679
+ },
680
+ {
681
+ "accuracy": 0.5107661492238358,
682
+ "f1": 0.44609894639939707,
683
+ "hf_subset": "rus_Cyrl-tur_Latn",
684
+ "languages": [
685
+ "rus-Cyrl",
686
+ "tur-Latn"
687
+ ],
688
+ "main_score": 0.44609894639939707,
689
+ "precision": 0.4232217373679567,
690
+ "recall": 0.5107661492238358
691
+ },
692
+ {
693
+ "accuracy": 0.9694541812719079,
694
+ "f1": 0.9604740443999332,
695
+ "hf_subset": "rus_Cyrl-ukr_Cyrl",
696
+ "languages": [
697
+ "rus-Cyrl",
698
+ "ukr-Cyrl"
699
+ ],
700
+ "main_score": 0.9604740443999332,
701
+ "precision": 0.9562260056751795,
702
+ "recall": 0.9694541812719079
703
+ },
704
+ {
705
+ "accuracy": 0.5312969454181272,
706
+ "f1": 0.473493216875783,
707
+ "hf_subset": "rus_Cyrl-vie_Latn",
708
+ "languages": [
709
+ "rus-Cyrl",
710
+ "vie-Latn"
711
+ ],
712
+ "main_score": 0.473493216875783,
713
+ "precision": 0.4524815180924344,
714
+ "recall": 0.5312969454181272
715
+ },
716
+ {
717
+ "accuracy": 0.313970956434652,
718
+ "f1": 0.24260927422223683,
719
+ "hf_subset": "rus_Cyrl-zho_Hant",
720
+ "languages": [
721
+ "rus-Cyrl",
722
+ "zho-Hant"
723
+ ],
724
+ "main_score": 0.24260927422223683,
725
+ "precision": 0.22350452075330857,
726
+ "recall": 0.313970956434652
727
+ },
728
+ {
729
+ "accuracy": 0.3985978968452679,
730
+ "f1": 0.3203881231331921,
731
+ "hf_subset": "rus_Cyrl-zul_Latn",
732
+ "languages": [
733
+ "rus-Cyrl",
734
+ "zul-Latn"
735
+ ],
736
+ "main_score": 0.3203881231331921,
737
+ "precision": 0.29770266682015883,
738
+ "recall": 0.3985978968452679
739
+ },
740
+ {
741
+ "accuracy": 0.557836755132699,
742
+ "f1": 0.5135922533581,
743
+ "hf_subset": "slk_Latn-rus_Cyrl",
744
+ "languages": [
745
+ "slk-Latn",
746
+ "rus-Cyrl"
747
+ ],
748
+ "main_score": 0.5135922533581,
749
+ "precision": 0.5000050690007637,
750
+ "recall": 0.557836755132699
751
+ },
752
+ {
753
+ "accuracy": 0.5508262393590385,
754
+ "f1": 0.5060256255275988,
755
+ "hf_subset": "slv_Latn-rus_Cyrl",
756
+ "languages": [
757
+ "slv-Latn",
758
+ "rus-Cyrl"
759
+ ],
760
+ "main_score": 0.5060256255275988,
761
+ "precision": 0.49215013435870536,
762
+ "recall": 0.5508262393590385
763
+ },
764
+ {
765
+ "accuracy": 0.7911867801702553,
766
+ "f1": 0.7511286087750785,
767
+ "hf_subset": "spa_Latn-rus_Cyrl",
768
+ "languages": [
769
+ "spa-Latn",
770
+ "rus-Cyrl"
771
+ ],
772
+ "main_score": 0.7511286087750785,
773
+ "precision": 0.7364055140682038,
774
+ "recall": 0.7911867801702553
775
+ },
776
+ {
777
+ "accuracy": 0.7721582373560341,
778
+ "f1": 0.7387494828318855,
779
+ "hf_subset": "srp_Cyrl-rus_Cyrl",
780
+ "languages": [
781
+ "srp-Cyrl",
782
+ "rus-Cyrl"
783
+ ],
784
+ "main_score": 0.7387494828318855,
785
+ "precision": 0.7269864464071216,
786
+ "recall": 0.7721582373560341
787
+ },
788
+ {
789
+ "accuracy": 0.5528292438657987,
790
+ "f1": 0.5063399861526777,
791
+ "hf_subset": "srp_Latn-rus_Cyrl",
792
+ "languages": [
793
+ "srp-Latn",
794
+ "rus-Cyrl"
795
+ ],
796
+ "main_score": 0.5063399861526777,
797
+ "precision": 0.49137847789701605,
798
+ "recall": 0.5528292438657987
799
+ },
800
+ {
801
+ "accuracy": 0.2944416624937406,
802
+ "f1": 0.2592275535416291,
803
+ "hf_subset": "swa_Latn-rus_Cyrl",
804
+ "languages": [
805
+ "swa-Latn",
806
+ "rus-Cyrl"
807
+ ],
808
+ "main_score": 0.2592275535416291,
809
+ "precision": 0.24911257044958537,
810
+ "recall": 0.2944416624937406
811
+ },
812
+ {
813
+ "accuracy": 0.6905358037055583,
814
+ "f1": 0.6430506458930674,
815
+ "hf_subset": "swe_Latn-rus_Cyrl",
816
+ "languages": [
817
+ "swe-Latn",
818
+ "rus-Cyrl"
819
+ ],
820
+ "main_score": 0.6430506458930674,
821
+ "precision": 0.6264585106702423,
822
+ "recall": 0.6905358037055583
823
+ },
824
+ {
825
+ "accuracy": 0.10315473209814723,
826
+ "f1": 0.08031950987608973,
827
+ "hf_subset": "tam_Taml-rus_Cyrl",
828
+ "languages": [
829
+ "tam-Taml",
830
+ "rus-Cyrl"
831
+ ],
832
+ "main_score": 0.08031950987608973,
833
+ "precision": 0.07413612985933278,
834
+ "recall": 0.10315473209814723
835
+ },
836
+ {
837
+ "accuracy": 0.39509263895843766,
838
+ "f1": 0.3454484828621361,
839
+ "hf_subset": "tur_Latn-rus_Cyrl",
840
+ "languages": [
841
+ "tur-Latn",
842
+ "rus-Cyrl"
843
+ ],
844
+ "main_score": 0.3454484828621361,
845
+ "precision": 0.33124363251443417,
846
+ "recall": 0.39509263895843766
847
+ },
848
+ {
849
+ "accuracy": 0.9619429143715573,
850
+ "f1": 0.9517943582039725,
851
+ "hf_subset": "ukr_Cyrl-rus_Cyrl",
852
+ "languages": [
853
+ "ukr-Cyrl",
854
+ "rus-Cyrl"
855
+ ],
856
+ "main_score": 0.9517943582039725,
857
+ "precision": 0.9472124853947589,
858
+ "recall": 0.9619429143715573
859
+ },
860
+ {
861
+ "accuracy": 0.41111667501251875,
862
+ "f1": 0.36551373726009684,
863
+ "hf_subset": "vie_Latn-rus_Cyrl",
864
+ "languages": [
865
+ "vie-Latn",
866
+ "rus-Cyrl"
867
+ ],
868
+ "main_score": 0.36551373726009684,
869
+ "precision": 0.3512578997218753,
870
+ "recall": 0.41111667501251875
871
+ },
872
+ {
873
+ "accuracy": 0.27240861291937907,
874
+ "f1": 0.24006993217863198,
875
+ "hf_subset": "zho_Hant-rus_Cyrl",
876
+ "languages": [
877
+ "zho-Hant",
878
+ "rus-Cyrl"
879
+ ],
880
+ "main_score": 0.24006993217863198,
881
+ "precision": 0.23001666514980884,
882
+ "recall": 0.27240861291937907
883
+ },
884
+ {
885
+ "accuracy": 0.3194792188282424,
886
+ "f1": 0.2851122013269222,
887
+ "hf_subset": "zul_Latn-rus_Cyrl",
888
+ "languages": [
889
+ "zul-Latn",
890
+ "rus-Cyrl"
891
+ ],
892
+ "main_score": 0.2851122013269222,
893
+ "precision": 0.27456465796638774,
894
+ "recall": 0.3194792188282424
895
+ }
896
+ ]
897
+ },
898
+ "task_name": "NTREXBitextMining"
899
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/OpusparcusPC.json ADDED
@@ -0,0 +1,105 @@
1
+ {
2
+ "dataset_revision": "9e9b1f8ef51616073f47f306f7f47dd91663f86a",
3
+ "evaluation_time": 4.2754998207092285,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.85",
6
+ "scores": {
7
+ "test.full": [
8
+ {
9
+ "cosine_accuracy": 0.7787990196078431,
10
+ "cosine_accuracy_threshold": 0.7010196447372437,
11
+ "cosine_ap": 0.8931675036831226,
12
+ "cosine_f1": 0.8428384849804093,
13
+ "cosine_f1_threshold": 0.7005778551101685,
14
+ "cosine_precision": 0.7876322213181448,
15
+ "cosine_recall": 0.9063670411985019,
16
+ "dot_accuracy": 0.7787990196078431,
17
+ "dot_accuracy_threshold": 0.7010196447372437,
18
+ "dot_ap": 0.8931675036831226,
19
+ "dot_f1": 0.8428384849804093,
20
+ "dot_f1_threshold": 0.7005778551101685,
21
+ "dot_precision": 0.7876322213181448,
22
+ "dot_recall": 0.9063670411985019,
23
+ "euclidean_accuracy": 0.7787990196078431,
24
+ "euclidean_accuracy_threshold": 0.7732791900634766,
25
+ "euclidean_ap": 0.8931675036831226,
26
+ "euclidean_f1": 0.8428384849804093,
27
+ "euclidean_f1_threshold": 0.7738502025604248,
28
+ "euclidean_precision": 0.7876322213181448,
29
+ "euclidean_recall": 0.9063670411985019,
30
+ "hf_subset": "ru",
31
+ "languages": [
32
+ "rus-Cyrl"
33
+ ],
34
+ "main_score": 0.8931675036831226,
35
+ "manhattan_accuracy": 0.7757352941176471,
36
+ "manhattan_accuracy_threshold": 16.89359474182129,
37
+ "manhattan_ap": 0.8928841525931214,
38
+ "manhattan_f1": 0.8414267296948861,
39
+ "manhattan_f1_threshold": 17.236160278320312,
40
+ "manhattan_precision": 0.7776012708498808,
41
+ "manhattan_recall": 0.9166666666666666,
42
+ "max_ap": 0.8931675036831226,
43
+ "max_f1": 0.8428384849804093,
44
+ "max_precision": 0.7876322213181448,
45
+ "max_recall": 0.9166666666666666,
46
+ "similarity_accuracy": 0.7787990196078431,
47
+ "similarity_accuracy_threshold": 0.7010196447372437,
48
+ "similarity_ap": 0.8931675036831226,
49
+ "similarity_f1": 0.8428384849804093,
50
+ "similarity_f1_threshold": 0.7005778551101685,
51
+ "similarity_precision": 0.7876322213181448,
52
+ "similarity_recall": 0.9063670411985019
53
+ }
54
+ ],
55
+ "validation.full": [
56
+ {
57
+ "cosine_accuracy": 0.7784730913642053,
58
+ "cosine_accuracy_threshold": 0.7424734234809875,
59
+ "cosine_ap": 0.9013615627082359,
60
+ "cosine_f1": 0.8383884110457219,
61
+ "cosine_f1_threshold": 0.7087600231170654,
62
+ "cosine_precision": 0.7788057190916736,
63
+ "cosine_recall": 0.907843137254902,
64
+ "dot_accuracy": 0.7784730913642053,
65
+ "dot_accuracy_threshold": 0.7424734830856323,
66
+ "dot_ap": 0.90136241109006,
67
+ "dot_f1": 0.8383884110457219,
68
+ "dot_f1_threshold": 0.7087600827217102,
69
+ "dot_precision": 0.7788057190916736,
70
+ "dot_recall": 0.907843137254902,
71
+ "euclidean_accuracy": 0.7784730913642053,
72
+ "euclidean_accuracy_threshold": 0.7176719903945923,
73
+ "euclidean_ap": 0.90136241109006,
74
+ "euclidean_f1": 0.8383884110457219,
75
+ "euclidean_f1_threshold": 0.7632036805152893,
76
+ "euclidean_precision": 0.7788057190916736,
77
+ "euclidean_recall": 0.907843137254902,
78
+ "hf_subset": "ru",
79
+ "languages": [
80
+ "rus-Cyrl"
81
+ ],
82
+ "main_score": 0.90136241109006,
83
+ "manhattan_accuracy": 0.7772215269086358,
84
+ "manhattan_accuracy_threshold": 15.513605117797852,
85
+ "manhattan_ap": 0.9008814281020514,
86
+ "manhattan_f1": 0.8390287769784173,
87
+ "manhattan_f1_threshold": 16.847454071044922,
88
+ "manhattan_precision": 0.7749169435215947,
89
+ "manhattan_recall": 0.9147058823529411,
90
+ "max_ap": 0.90136241109006,
91
+ "max_f1": 0.8390287769784173,
92
+ "max_precision": 0.7788057190916736,
93
+ "max_recall": 0.9147058823529411,
94
+ "similarity_accuracy": 0.7784730913642053,
95
+ "similarity_accuracy_threshold": 0.7424734830856323,
96
+ "similarity_ap": 0.90136241109006,
97
+ "similarity_f1": 0.8383884110457219,
98
+ "similarity_f1_threshold": 0.708760142326355,
99
+ "similarity_precision": 0.7788057190916736,
100
+ "similarity_recall": 0.907843137254902
101
+ }
102
+ ]
103
+ },
104
+ "task_name": "OpusparcusPC"
105
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/PublicHealthQA.json ADDED
@@ -0,0 +1,158 @@
1
+ {
2
+ "dataset_revision": "main",
3
+ "evaluation_time": 0.8576195240020752,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.85",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "russian",
10
+ "languages": [
11
+ "rus-Cyrl"
12
+ ],
13
+ "main_score": 0.77367,
14
+ "map_at_1": 0.63077,
15
+ "map_at_10": 0.73045,
16
+ "map_at_100": 0.73403,
17
+ "map_at_1000": 0.73403,
18
+ "map_at_20": 0.73238,
19
+ "map_at_3": 0.71538,
20
+ "map_at_5": 0.72462,
21
+ "mrr_at_1": 0.6307692307692307,
22
+ "mrr_at_10": 0.7304456654456655,
23
+ "mrr_at_100": 0.7340319615025498,
24
+ "mrr_at_1000": 0.7340319615025498,
25
+ "mrr_at_20": 0.732376283846872,
26
+ "mrr_at_3": 0.7153846153846154,
27
+ "mrr_at_5": 0.7246153846153846,
28
+ "nauc_map_at_1000_diff1": 0.8204979567430769,
29
+ "nauc_map_at_1000_max": 0.5020828897777788,
30
+ "nauc_map_at_1000_std": 0.3714842360004974,
31
+ "nauc_map_at_100_diff1": 0.8204979567430769,
32
+ "nauc_map_at_100_max": 0.5020828897777788,
33
+ "nauc_map_at_100_std": 0.3714842360004974,
34
+ "nauc_map_at_10_diff1": 0.8203462217758073,
35
+ "nauc_map_at_10_max": 0.5036410059672116,
36
+ "nauc_map_at_10_std": 0.37802461366015006,
37
+ "nauc_map_at_1_diff1": 0.8346494868703969,
38
+ "nauc_map_at_1_max": 0.45188565515625734,
39
+ "nauc_map_at_1_std": 0.29220244144853735,
40
+ "nauc_map_at_20_diff1": 0.8193315306455635,
41
+ "nauc_map_at_20_max": 0.5038878774328058,
42
+ "nauc_map_at_20_std": 0.3752598968609065,
43
+ "nauc_map_at_3_diff1": 0.8170120296262231,
44
+ "nauc_map_at_3_max": 0.49747112980090646,
45
+ "nauc_map_at_3_std": 0.36259072267838716,
46
+ "nauc_map_at_5_diff1": 0.8242459981999789,
47
+ "nauc_map_at_5_max": 0.5042346184562007,
48
+ "nauc_map_at_5_std": 0.37624365532930015,
49
+ "nauc_mrr_at_1000_diff1": 0.8204979567430769,
50
+ "nauc_mrr_at_1000_max": 0.5020828897777788,
51
+ "nauc_mrr_at_1000_std": 0.3714842360004974,
52
+ "nauc_mrr_at_100_diff1": 0.8204979567430769,
53
+ "nauc_mrr_at_100_max": 0.5020828897777788,
54
+ "nauc_mrr_at_100_std": 0.3714842360004974,
55
+ "nauc_mrr_at_10_diff1": 0.8203462217758073,
56
+ "nauc_mrr_at_10_max": 0.5036410059672116,
57
+ "nauc_mrr_at_10_std": 0.37802461366015006,
58
+ "nauc_mrr_at_1_diff1": 0.8346494868703969,
59
+ "nauc_mrr_at_1_max": 0.45188565515625734,
60
+ "nauc_mrr_at_1_std": 0.29220244144853735,
61
+ "nauc_mrr_at_20_diff1": 0.8193315306455635,
62
+ "nauc_mrr_at_20_max": 0.5038878774328058,
63
+ "nauc_mrr_at_20_std": 0.3752598968609065,
64
+ "nauc_mrr_at_3_diff1": 0.8170120296262231,
65
+ "nauc_mrr_at_3_max": 0.49747112980090646,
66
+ "nauc_mrr_at_3_std": 0.36259072267838716,
67
+ "nauc_mrr_at_5_diff1": 0.8242459981999789,
68
+ "nauc_mrr_at_5_max": 0.5042346184562007,
69
+ "nauc_mrr_at_5_std": 0.37624365532930015,
70
+ "nauc_ndcg_at_1000_diff1": 0.8176885264870617,
71
+ "nauc_ndcg_at_1000_max": 0.5113359743136842,
72
+ "nauc_ndcg_at_1000_std": 0.3877079674065654,
73
+ "nauc_ndcg_at_100_diff1": 0.8176885264870617,
74
+ "nauc_ndcg_at_100_max": 0.5113359743136842,
75
+ "nauc_ndcg_at_100_std": 0.3877079674065654,
76
+ "nauc_ndcg_at_10_diff1": 0.8135705369187243,
77
+ "nauc_ndcg_at_10_max": 0.5263833300626314,
78
+ "nauc_ndcg_at_10_std": 0.4287684867527576,
79
+ "nauc_ndcg_at_1_diff1": 0.8346494868703969,
80
+ "nauc_ndcg_at_1_max": 0.45188565515625734,
81
+ "nauc_ndcg_at_1_std": 0.29220244144853735,
82
+ "nauc_ndcg_at_20_diff1": 0.8086358053451389,
83
+ "nauc_ndcg_at_20_max": 0.527833610391398,
84
+ "nauc_ndcg_at_20_std": 0.4175393114975508,
85
+ "nauc_ndcg_at_3_diff1": 0.8095861427096206,
86
+ "nauc_ndcg_at_3_max": 0.5111846431047327,
87
+ "nauc_ndcg_at_3_std": 0.38866521142874416,
88
+ "nauc_ndcg_at_5_diff1": 0.824718161813802,
89
+ "nauc_ndcg_at_5_max": 0.5268755593875084,
90
+ "nauc_ndcg_at_5_std": 0.4202191406704718,
91
+ "nauc_precision_at_1000_diff1": 1.0,
92
+ "nauc_precision_at_1000_max": 1.0,
93
+ "nauc_precision_at_1000_std": 1.0,
94
+ "nauc_precision_at_100_diff1": NaN,
95
+ "nauc_precision_at_100_max": NaN,
96
+ "nauc_precision_at_100_std": NaN,
97
+ "nauc_precision_at_10_diff1": 0.7618164026439197,
98
+ "nauc_precision_at_10_max": 0.6905151548720841,
99
+ "nauc_precision_at_10_std": 0.8117407038144833,
100
+ "nauc_precision_at_1_diff1": 0.8346494868703969,
101
+ "nauc_precision_at_1_max": 0.45188565515625734,
102
+ "nauc_precision_at_1_std": 0.29220244144853735,
103
+ "nauc_precision_at_20_diff1": 0.6751273955910031,
104
+ "nauc_precision_at_20_max": 0.7867785532292002,
105
+ "nauc_precision_at_20_std": 0.8616650049850431,
106
+ "nauc_precision_at_3_diff1": 0.7796631919766247,
107
+ "nauc_precision_at_3_max": 0.5642066212568962,
108
+ "nauc_precision_at_3_std": 0.49227955026266085,
109
+ "nauc_precision_at_5_diff1": 0.832014717044242,
110
+ "nauc_precision_at_5_max": 0.6439782799629971,
111
+ "nauc_precision_at_5_std": 0.6517847222953163,
112
+ "nauc_recall_at_1000_diff1": NaN,
113
+ "nauc_recall_at_1000_max": NaN,
114
+ "nauc_recall_at_1000_std": NaN,
115
+ "nauc_recall_at_100_diff1": NaN,
116
+ "nauc_recall_at_100_max": NaN,
117
+ "nauc_recall_at_100_std": NaN,
118
+ "nauc_recall_at_10_diff1": 0.7618164026439214,
119
+ "nauc_recall_at_10_max": 0.6905151548720851,
120
+ "nauc_recall_at_10_std": 0.8117407038144823,
121
+ "nauc_recall_at_1_diff1": 0.8346494868703969,
122
+ "nauc_recall_at_1_max": 0.45188565515625734,
123
+ "nauc_recall_at_1_std": 0.29220244144853735,
124
+ "nauc_recall_at_20_diff1": 0.6751273955910043,
125
+ "nauc_recall_at_20_max": 0.7867785532292005,
126
+ "nauc_recall_at_20_std": 0.861665004985044,
127
+ "nauc_recall_at_3_diff1": 0.7796631919766249,
128
+ "nauc_recall_at_3_max": 0.564206621256896,
129
+ "nauc_recall_at_3_std": 0.49227955026266096,
130
+ "nauc_recall_at_5_diff1": 0.8320147170442402,
131
+ "nauc_recall_at_5_max": 0.6439782799629958,
132
+ "nauc_recall_at_5_std": 0.6517847222953154,
133
+ "ndcg_at_1": 0.63077,
134
+ "ndcg_at_10": 0.77367,
135
+ "ndcg_at_100": 0.79286,
136
+ "ndcg_at_1000": 0.79286,
137
+ "ndcg_at_20": 0.78121,
138
+ "ndcg_at_3": 0.74121,
139
+ "ndcg_at_5": 0.75906,
140
+ "precision_at_1": 0.63077,
141
+ "precision_at_10": 0.09077,
142
+ "precision_at_100": 0.01,
143
+ "precision_at_1000": 0.001,
144
+ "precision_at_20": 0.04692,
145
+ "precision_at_3": 0.27179,
146
+ "precision_at_5": 0.17231,
147
+ "recall_at_1": 0.63077,
148
+ "recall_at_10": 0.90769,
149
+ "recall_at_100": 1.0,
150
+ "recall_at_1000": 1.0,
151
+ "recall_at_20": 0.93846,
152
+ "recall_at_3": 0.81538,
153
+ "recall_at_5": 0.86154
154
+ }
155
+ ]
156
+ },
157
+ "task_name": "PublicHealthQA"
158
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/RUParaPhraserSTS.json ADDED
@@ -0,0 +1,32 @@
+ {
+ "dataset_revision": "43265056790b8f7c59e0139acb4be0a8dad2c8f4",
+ "evaluation_time": 1.964456558227539,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.12.49",
+ "scores": {
+ "test": [
+ {
+ "cosine_pearson": 0.6745790107750946,
+ "cosine_spearman": 0.7296642757009373,
+ "euclidean_pearson": 0.707654617816984,
+ "euclidean_spearman": 0.7296642750862428,
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.7296642757009373,
+ "manhattan_pearson": 0.7061082382072492,
+ "manhattan_spearman": 0.727920735377209,
+ "pearson": [
+ 0.6745790026874137,
+ 1.1593643084891602e-255
+ ],
+ "spearman": [
+ 0.7296642757009373,
+ 1.37153e-319
+ ]
+ }
+ ]
+ },
+ "task_name": "RUParaPhraserSTS"
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/RiaNewsRetrieval.json ADDED
@@ -0,0 +1,158 @@
1
+ {
2
+ "dataset_revision": "82374b0bbacda6114f39ff9c5b925fa1512ca5d7",
3
+ "evaluation_time": 4953.349033355713,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.49",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "rus-Cyrl"
12
+ ],
13
+ "main_score": 0.6936,
14
+ "map_at_1": 0.546,
15
+ "map_at_10": 0.64636,
16
+ "map_at_100": 0.65095,
17
+ "map_at_1000": 0.65111,
18
+ "map_at_20": 0.64931,
19
+ "map_at_3": 0.62478,
20
+ "map_at_5": 0.63771,
21
+ "mrr_at_1": 0.546,
22
+ "mrr_at_10": 0.6464210317460276,
23
+ "mrr_at_100": 0.6510111643037915,
24
+ "mrr_at_1000": 0.6511706325407844,
25
+ "mrr_at_20": 0.6493712196205874,
26
+ "mrr_at_3": 0.624816666666663,
27
+ "mrr_at_5": 0.6377716666666601,
28
+ "nauc_map_at_1000_diff1": 0.6192717985566469,
29
+ "nauc_map_at_1000_max": 0.2554965390176142,
30
+ "nauc_map_at_1000_std": -0.14529094336477097,
31
+ "nauc_map_at_100_diff1": 0.6192030924815182,
32
+ "nauc_map_at_100_max": 0.25552874773162126,
33
+ "nauc_map_at_100_std": -0.14519880481791372,
34
+ "nauc_map_at_10_diff1": 0.6183513984903937,
35
+ "nauc_map_at_10_max": 0.25501121411197497,
36
+ "nauc_map_at_10_std": -0.1469458596284794,
37
+ "nauc_map_at_1_diff1": 0.6573125956002223,
38
+ "nauc_map_at_1_max": 0.24903833516494245,
39
+ "nauc_map_at_1_std": -0.14829828108352996,
40
+ "nauc_map_at_20_diff1": 0.6188616517477113,
41
+ "nauc_map_at_20_max": 0.2552052302494999,
42
+ "nauc_map_at_20_std": -0.14559366020613665,
43
+ "nauc_map_at_3_diff1": 0.6198513033613942,
44
+ "nauc_map_at_3_max": 0.253967923369672,
45
+ "nauc_map_at_3_std": -0.1531089943966326,
46
+ "nauc_map_at_5_diff1": 0.6184889737711866,
47
+ "nauc_map_at_5_max": 0.25452717641766254,
48
+ "nauc_map_at_5_std": -0.14906939960258417,
49
+ "nauc_mrr_at_1000_diff1": 0.6192198333554683,
50
+ "nauc_mrr_at_1000_max": 0.25555068760438787,
51
+ "nauc_mrr_at_1000_std": -0.1453535701487458,
52
+ "nauc_mrr_at_100_diff1": 0.6191511503500164,
53
+ "nauc_mrr_at_100_max": 0.2555828694482267,
54
+ "nauc_mrr_at_100_std": -0.14526138313487838,
55
+ "nauc_mrr_at_10_diff1": 0.6183002465083474,
56
+ "nauc_mrr_at_10_max": 0.2550633494700226,
57
+ "nauc_mrr_at_10_std": -0.14700928303559116,
58
+ "nauc_mrr_at_1_diff1": 0.6573125956002223,
59
+ "nauc_mrr_at_1_max": 0.2492521161380164,
60
+ "nauc_mrr_at_1_std": -0.1483389679789941,
61
+ "nauc_mrr_at_20_diff1": 0.6188016768662878,
62
+ "nauc_mrr_at_20_max": 0.25528317882886503,
63
+ "nauc_mrr_at_20_std": -0.14563737941618315,
64
+ "nauc_mrr_at_3_diff1": 0.6198083571247902,
65
+ "nauc_mrr_at_3_max": 0.25405227037654393,
66
+ "nauc_mrr_at_3_std": -0.15317613874208946,
67
+ "nauc_mrr_at_5_diff1": 0.6184523285933092,
68
+ "nauc_mrr_at_5_max": 0.25459903317312155,
69
+ "nauc_mrr_at_5_std": -0.1491329805916664,
70
+ "nauc_ndcg_at_1000_diff1": 0.6090677177512321,
71
+ "nauc_ndcg_at_1000_max": 0.26128677177225235,
72
+ "nauc_ndcg_at_1000_std": -0.1342191912053969,
73
+ "nauc_ndcg_at_100_diff1": 0.6069179363962744,
74
+ "nauc_ndcg_at_100_max": 0.26221025599598397,
75
+ "nauc_ndcg_at_100_std": -0.13102645537513857,
76
+ "nauc_ndcg_at_10_diff1": 0.6021548704526015,
77
+ "nauc_ndcg_at_10_max": 0.2581707211591428,
78
+ "nauc_ndcg_at_10_std": -0.14089110606539773,
79
+ "nauc_ndcg_at_1_diff1": 0.6573125956002223,
80
+ "nauc_ndcg_at_1_max": 0.24903833516494245,
81
+ "nauc_ndcg_at_1_std": -0.14829828108352996,
82
+ "nauc_ndcg_at_20_diff1": 0.6036737597036538,
83
+ "nauc_ndcg_at_20_max": 0.2591754810640342,
84
+ "nauc_ndcg_at_20_std": -0.13473688658295488,
85
+ "nauc_ndcg_at_3_diff1": 0.6066536301837209,
86
+ "nauc_ndcg_at_3_max": 0.2553975038454247,
87
+ "nauc_ndcg_at_3_std": -0.1545646126120372,
88
+ "nauc_ndcg_at_5_diff1": 0.6031233060130586,
89
+ "nauc_ndcg_at_5_max": 0.2563634308448639,
90
+ "nauc_ndcg_at_5_std": -0.14704682873260821,
91
+ "nauc_precision_at_1000_diff1": 0.4251749008782194,
92
+ "nauc_precision_at_1000_max": 0.5321858211883739,
93
+ "nauc_precision_at_1000_std": 0.3267883642990191,
94
+ "nauc_precision_at_100_diff1": 0.47020970550382185,
95
+ "nauc_precision_at_100_max": 0.3914384973208474,
96
+ "nauc_precision_at_100_std": 0.14426695603165984,
97
+ "nauc_precision_at_10_diff1": 0.5157100755055488,
98
+ "nauc_precision_at_10_max": 0.27656527338226683,
99
+ "nauc_precision_at_10_std": -0.1019936976699208,
100
+ "nauc_precision_at_1_diff1": 0.6573125956002223,
101
+ "nauc_precision_at_1_max": 0.24903833516494245,
102
+ "nauc_precision_at_1_std": -0.14829828108352996,
103
+ "nauc_precision_at_20_diff1": 0.5005936129692405,
104
+ "nauc_precision_at_20_max": 0.2910045808154064,
105
+ "nauc_precision_at_20_std": -0.03784190883084216,
106
+ "nauc_precision_at_3_diff1": 0.5610828699631795,
107
+ "nauc_precision_at_3_max": 0.2602150710258821,
108
+ "nauc_precision_at_3_std": -0.15949833324350682,
109
+ "nauc_precision_at_5_diff1": 0.5406068053845902,
110
+ "nauc_precision_at_5_max": 0.26360836973260915,
111
+ "nauc_precision_at_5_std": -0.13742130328352495,
112
+ "nauc_recall_at_1000_diff1": 0.4251749008782242,
113
+ "nauc_recall_at_1000_max": 0.5321858211883732,
114
+ "nauc_recall_at_1000_std": 0.3267883642990184,
115
+ "nauc_recall_at_100_diff1": 0.47020970550382385,
116
+ "nauc_recall_at_100_max": 0.3914384973208488,
117
+ "nauc_recall_at_100_std": 0.1442669560316651,
118
+ "nauc_recall_at_10_diff1": 0.5157100755055495,
119
+ "nauc_recall_at_10_max": 0.2765652733822694,
120
+ "nauc_recall_at_10_std": -0.10199369766991769,
121
+ "nauc_recall_at_1_diff1": 0.6573125956002223,
122
+ "nauc_recall_at_1_max": 0.24903833516494245,
123
+ "nauc_recall_at_1_std": -0.14829828108352996,
124
+ "nauc_recall_at_20_diff1": 0.5005936129692395,
125
+ "nauc_recall_at_20_max": 0.2910045808154081,
126
+ "nauc_recall_at_20_std": -0.03784190883083919,
127
+ "nauc_recall_at_3_diff1": 0.5610828699631795,
128
+ "nauc_recall_at_3_max": 0.2602150710258824,
129
+ "nauc_recall_at_3_std": -0.15949833324350754,
130
+ "nauc_recall_at_5_diff1": 0.54060680538459,
131
+ "nauc_recall_at_5_max": 0.26360836973261015,
132
+ "nauc_recall_at_5_std": -0.13742130328352384,
133
+ "ndcg_at_1": 0.546,
134
+ "ndcg_at_10": 0.6936,
135
+ "ndcg_at_100": 0.71576,
136
+ "ndcg_at_1000": 0.72014,
137
+ "ndcg_at_20": 0.70428,
138
+ "ndcg_at_3": 0.64947,
139
+ "ndcg_at_5": 0.6728,
140
+ "precision_at_1": 0.546,
141
+ "precision_at_10": 0.08413,
142
+ "precision_at_100": 0.00945,
143
+ "precision_at_1000": 0.00098,
144
+ "precision_at_20": 0.04417,
145
+ "precision_at_3": 0.2402,
146
+ "precision_at_5": 0.15546,
147
+ "recall_at_1": 0.546,
148
+ "recall_at_10": 0.8413,
149
+ "recall_at_100": 0.9445,
150
+ "recall_at_1000": 0.9793,
151
+ "recall_at_20": 0.8834,
152
+ "recall_at_3": 0.7206,
153
+ "recall_at_5": 0.7773
154
+ }
155
+ ]
156
+ },
157
+ "task_name": "RiaNewsRetrieval"
158
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/RuBQReranking.json ADDED
@@ -0,0 +1,26 @@
1
+ {
2
+ "dataset_revision": "2e96b8f098fa4b0950fc58eacadeb31c0d0c7fa2",
3
+ "evaluation_time": 132.47593069076538,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.49",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "rus-Cyrl"
12
+ ],
13
+ "main_score": 0.6865102847141421,
14
+ "map": 0.6865102847141421,
15
+ "mrr": 0.7420798665479517,
16
+ "nAUC_map_diff1": 0.395699900431962,
17
+ "nAUC_map_max": 0.16323410729615467,
18
+ "nAUC_map_std": 0.06963589388310379,
19
+ "nAUC_mrr_diff1": 0.4343608765293291,
20
+ "nAUC_mrr_max": 0.2069174089988326,
21
+ "nAUC_mrr_std": 0.09441690837227301
22
+ }
23
+ ]
24
+ },
25
+ "task_name": "RuBQReranking"
26
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/RuBQRetrieval.json ADDED
@@ -0,0 +1,158 @@
1
+ {
2
+ "dataset_revision": "e19b6ffa60b3bc248e0b41f4cc37c26a55c2a67b",
3
+ "evaluation_time": 161.2447590827942,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.49",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "rus-Cyrl"
12
+ ],
13
+ "main_score": 0.65712,
14
+ "map_at_1": 0.37843,
15
+ "map_at_10": 0.57496,
16
+ "map_at_100": 0.58542,
17
+ "map_at_1000": 0.58572,
18
+ "map_at_20": 0.58233,
19
+ "map_at_3": 0.52096,
20
+ "map_at_5": 0.55506,
21
+ "mrr_at_1": 0.5443262411347518,
22
+ "mrr_at_10": 0.6662377106082787,
23
+ "mrr_at_100": 0.6697531681522045,
24
+ "mrr_at_1000": 0.669858331719073,
25
+ "mrr_at_20": 0.6685101016534366,
26
+ "mrr_at_3": 0.6417454688731294,
27
+ "mrr_at_5": 0.658205279747834,
28
+ "nauc_map_at_1000_diff1": 0.38489782098626746,
29
+ "nauc_map_at_1000_max": 0.25568618482559013,
30
+ "nauc_map_at_1000_std": -0.13146798419264624,
31
+ "nauc_map_at_100_diff1": 0.3848361277142902,
32
+ "nauc_map_at_100_max": 0.25587562658336904,
33
+ "nauc_map_at_100_std": -0.13116899108628735,
34
+ "nauc_map_at_10_diff1": 0.38108574170265186,
35
+ "nauc_map_at_10_max": 0.2528285942730049,
36
+ "nauc_map_at_10_std": -0.13834215507780068,
37
+ "nauc_map_at_1_diff1": 0.4266378883140734,
38
+ "nauc_map_at_1_max": 0.17939722909875316,
39
+ "nauc_map_at_1_std": -0.11957260225662084,
40
+ "nauc_map_at_20_diff1": 0.3846452464748248,
41
+ "nauc_map_at_20_max": 0.25587379180228204,
42
+ "nauc_map_at_20_std": -0.13341947145408165,
43
+ "nauc_map_at_3_diff1": 0.38560686212736306,
44
+ "nauc_map_at_3_max": 0.23537491209695116,
45
+ "nauc_map_at_3_std": -0.14700150079763571,
46
+ "nauc_map_at_5_diff1": 0.3820589580644066,
47
+ "nauc_map_at_5_max": 0.24393314625303994,
48
+ "nauc_map_at_5_std": -0.14617101844168257,
49
+ "nauc_mrr_at_1000_diff1": 0.44593062841654596,
50
+ "nauc_mrr_at_1000_max": 0.2870587084634216,
51
+ "nauc_mrr_at_1000_std": -0.15902041846417087,
52
+ "nauc_mrr_at_100_diff1": 0.4459536546140403,
53
+ "nauc_mrr_at_100_max": 0.28720871575174617,
54
+ "nauc_mrr_at_100_std": -0.15881760337337933,
55
+ "nauc_mrr_at_10_diff1": 0.4433930533184716,
56
+ "nauc_mrr_at_10_max": 0.2863521335778894,
57
+ "nauc_mrr_at_10_std": -0.16084055392796348,
58
+ "nauc_mrr_at_1_diff1": 0.4823675118424224,
59
+ "nauc_mrr_at_1_max": 0.2610557049810936,
60
+ "nauc_mrr_at_1_std": -0.14578539245018504,
61
+ "nauc_mrr_at_20_diff1": 0.44560478765467043,
62
+ "nauc_mrr_at_20_max": 0.28726332483816513,
63
+ "nauc_mrr_at_20_std": -0.15936771583470377,
64
+ "nauc_mrr_at_3_diff1": 0.4460664738453841,
65
+ "nauc_mrr_at_3_max": 0.2891172189678989,
66
+ "nauc_mrr_at_3_std": -0.1676487871527442,
67
+ "nauc_mrr_at_5_diff1": 0.44196649289218565,
68
+ "nauc_mrr_at_5_max": 0.28580357606541945,
69
+ "nauc_mrr_at_5_std": -0.16842747636832658,
70
+ "nauc_ndcg_at_1000_diff1": 0.38987732608054115,
71
+ "nauc_ndcg_at_1000_max": 0.27852746663610395,
72
+ "nauc_ndcg_at_1000_std": -0.11835734568498252,
73
+ "nauc_ndcg_at_100_diff1": 0.38890554616361295,
74
+ "nauc_ndcg_at_100_max": 0.2836998751967851,
75
+ "nauc_ndcg_at_100_std": -0.1102692339803778,
76
+ "nauc_ndcg_at_10_diff1": 0.37541565374964214,
77
+ "nauc_ndcg_at_10_max": 0.27490596783567856,
78
+ "nauc_ndcg_at_10_std": -0.13592710357683446,
79
+ "nauc_ndcg_at_1_diff1": 0.4852870698862955,
80
+ "nauc_ndcg_at_1_max": 0.2593247777854429,
81
+ "nauc_ndcg_at_1_std": -0.14385069455260055,
82
+ "nauc_ndcg_at_20_diff1": 0.3859456423392091,
83
+ "nauc_ndcg_at_20_max": 0.283676633325501,
84
+ "nauc_ndcg_at_20_std": -0.12222997969091237,
85
+ "nauc_ndcg_at_3_diff1": 0.38646849873108885,
86
+ "nauc_ndcg_at_3_max": 0.25623486576562504,
87
+ "nauc_ndcg_at_3_std": -0.16151251692518964,
88
+ "nauc_ndcg_at_5_diff1": 0.3774752567380064,
89
+ "nauc_ndcg_at_5_max": 0.2601399815977819,
90
+ "nauc_ndcg_at_5_std": -0.15876839876767943,
91
+ "nauc_precision_at_1000_diff1": -0.06940539726469348,
92
+ "nauc_precision_at_1000_max": 0.07735411184105688,
93
+ "nauc_precision_at_1000_std": 0.07340706987272318,
94
+ "nauc_precision_at_100_diff1": -0.0499940747054876,
95
+ "nauc_precision_at_100_max": 0.11591515168739037,
96
+ "nauc_precision_at_100_std": 0.09527063403346367,
97
+ "nauc_precision_at_10_diff1": 0.01816608735929714,
98
+ "nauc_precision_at_10_max": 0.1704492963431668,
99
+ "nauc_precision_at_10_std": 0.0024704163075178528,
100
+ "nauc_precision_at_1_diff1": 0.4852870698862955,
101
+ "nauc_precision_at_1_max": 0.2593247777854429,
102
+ "nauc_precision_at_1_std": -0.14385069455260055,
103
+ "nauc_precision_at_20_diff1": -0.001471573570733137,
104
+ "nauc_precision_at_20_max": 0.15476227734460227,
105
+ "nauc_precision_at_20_std": 0.047569343423503524,
106
+ "nauc_precision_at_3_diff1": 0.1687128281454292,
107
+ "nauc_precision_at_3_max": 0.23228624076185875,
108
+ "nauc_precision_at_3_std": -0.0974123472142008,
109
+ "nauc_precision_at_5_diff1": 0.08823766381321074,
110
+ "nauc_precision_at_5_max": 0.19444428010325754,
111
+ "nauc_precision_at_5_std": -0.06455072094601802,
112
+ "nauc_recall_at_1000_diff1": 0.01813522530021961,
113
+ "nauc_recall_at_1000_max": 0.33758054529775305,
114
+ "nauc_recall_at_1000_std": 0.41001687802284215,
115
+ "nauc_recall_at_100_diff1": 0.19185481742024948,
116
+ "nauc_recall_at_100_max": 0.4064369786233169,
117
+ "nauc_recall_at_100_std": 0.29713335687232767,
118
+ "nauc_recall_at_10_diff1": 0.220931930281711,
119
+ "nauc_recall_at_10_max": 0.28138214146792095,
120
+ "nauc_recall_at_10_std": -0.08003759962512386,
121
+ "nauc_recall_at_1_diff1": 0.4266378883140734,
122
+ "nauc_recall_at_1_max": 0.17939722909875316,
123
+ "nauc_recall_at_1_std": -0.11957260225662084,
124
+ "nauc_recall_at_20_diff1": 0.24550853933099698,
125
+ "nauc_recall_at_20_max": 0.3430819688642811,
126
+ "nauc_recall_at_20_std": 0.012282539805144784,
127
+ "nauc_recall_at_3_diff1": 0.31362544680466026,
128
+ "nauc_recall_at_3_max": 0.23969015382166212,
129
+ "nauc_recall_at_3_std": -0.16497991993264738,
130
+ "nauc_recall_at_5_diff1": 0.2660363920455184,
131
+ "nauc_recall_at_5_max": 0.2402071075233783,
132
+ "nauc_recall_at_5_std": -0.16258069295593652,
133
+ "ndcg_at_1": 0.54314,
134
+ "ndcg_at_10": 0.65712,
135
+ "ndcg_at_100": 0.69061,
136
+ "ndcg_at_1000": 0.69642,
137
+ "ndcg_at_20": 0.67512,
138
+ "ndcg_at_3": 0.58189,
139
+ "ndcg_at_5": 0.62304,
140
+ "precision_at_1": 0.54314,
141
+ "precision_at_10": 0.1315,
142
+ "precision_at_100": 0.01559,
143
+ "precision_at_1000": 0.00163,
144
+ "precision_at_20": 0.07151,
145
+ "precision_at_3": 0.32171,
146
+ "precision_at_5": 0.22849,
147
+ "recall_at_1": 0.37843,
148
+ "recall_at_10": 0.80335,
149
+ "recall_at_100": 0.93299,
150
+ "recall_at_1000": 0.97289,
151
+ "recall_at_20": 0.86255,
152
+ "recall_at_3": 0.61962,
153
+ "recall_at_5": 0.71615
154
+ }
155
+ ]
156
+ },
157
+ "task_name": "RuBQRetrieval"
158
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/RuReviewsClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "f6d2c31f4dc6b88f468552750bfec05b4b41b05a",
3
+ "evaluation_time": 7.37565541267395,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.49",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.645849609375,
10
+ "f1": 0.6396368173239964,
11
+ "f1_weighted": 0.6396482257740976,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.645849609375,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.67041015625,
20
+ "f1": 0.6652440931699565,
21
+ "f1_weighted": 0.6652570047086968
22
+ },
23
+ {
24
+ "accuracy": 0.59423828125,
25
+ "f1": 0.5859005685485178,
26
+ "f1_weighted": 0.5859350281729734
27
+ },
28
+ {
29
+ "accuracy": 0.65673828125,
30
+ "f1": 0.6608356423636459,
31
+ "f1_weighted": 0.6608412502703905
32
+ },
33
+ {
34
+ "accuracy": 0.6650390625,
35
+ "f1": 0.6633886339442093,
36
+ "f1_weighted": 0.6634150378970164
37
+ },
38
+ {
39
+ "accuracy": 0.67236328125,
40
+ "f1": 0.6769200825161508,
41
+ "f1_weighted": 0.676923454457222
42
+ },
43
+ {
44
+ "accuracy": 0.638671875,
45
+ "f1": 0.6326608318210637,
46
+ "f1_weighted": 0.6326802646758448
47
+ },
48
+ {
49
+ "accuracy": 0.62841796875,
50
+ "f1": 0.6152033348547227,
51
+ "f1_weighted": 0.6152097082908654
52
+ },
53
+ {
54
+ "accuracy": 0.60400390625,
55
+ "f1": 0.5856033622185024,
56
+ "f1_weighted": 0.5855962514329456
57
+ },
58
+ {
59
+ "accuracy": 0.65625,
60
+ "f1": 0.6424911425936807,
61
+ "f1_weighted": 0.6424710886379962
62
+ },
63
+ {
64
+ "accuracy": 0.67236328125,
65
+ "f1": 0.6681204812095144,
66
+ "f1_weighted": 0.668153169197025
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "RuReviewsClassification"
73
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/RuSTSBenchmarkSTS.json ADDED
@@ -0,0 +1,32 @@
1
+ {
2
+ "dataset_revision": "7cf24f325c6da6195df55bef3d86b5e0616f3018",
3
+ "evaluation_time": 1.3007500171661377,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.49",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "cosine_pearson": 0.8223645607732791,
10
+ "cosine_spearman": 0.8176680136907917,
11
+ "euclidean_pearson": 0.8116260754635747,
12
+ "euclidean_spearman": 0.8176837242717678,
13
+ "hf_subset": "default",
14
+ "languages": [
15
+ "rus-Cyrl"
16
+ ],
17
+ "main_score": 0.8176680136907917,
18
+ "manhattan_pearson": 0.811547428752827,
19
+ "manhattan_spearman": 0.8173170363523264,
20
+ "pearson": [
21
+ 0.8223645636202919,
22
+ 2.239381197392e-311
23
+ ],
24
+ "spearman": [
25
+ 0.8176593597675507,
26
+ 6.433397916040341e-305
27
+ ]
28
+ }
29
+ ]
30
+ },
31
+ "task_name": "RuSTSBenchmarkSTS"
32
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/RuSciBenchGRNTIClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "673a610d6d3dd91a547a0d57ae1b56f37ebbf6a1",
3
+ "evaluation_time": 30.41159462928772,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.49",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.566748046875,
10
+ "f1": 0.5527289651020678,
11
+ "f1_weighted": 0.5528565301737085,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.566748046875,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.57080078125,
20
+ "f1": 0.5535612238862618,
21
+ "f1_weighted": 0.5536454920454653
22
+ },
23
+ {
24
+ "accuracy": 0.56982421875,
25
+ "f1": 0.5611962510921882,
26
+ "f1_weighted": 0.5613406422819829
27
+ },
28
+ {
29
+ "accuracy": 0.55419921875,
30
+ "f1": 0.5423791533782084,
31
+ "f1_weighted": 0.5425347288009397
32
+ },
33
+ {
34
+ "accuracy": 0.583984375,
35
+ "f1": 0.5743929379764394,
36
+ "f1_weighted": 0.5745220437987415
37
+ },
38
+ {
39
+ "accuracy": 0.58251953125,
40
+ "f1": 0.5761330279404718,
41
+ "f1_weighted": 0.5762331260761724
42
+ },
43
+ {
44
+ "accuracy": 0.53564453125,
45
+ "f1": 0.5148647177231223,
46
+ "f1_weighted": 0.5150032471036498
47
+ },
48
+ {
49
+ "accuracy": 0.57421875,
50
+ "f1": 0.5564420533348435,
51
+ "f1_weighted": 0.5566121168523138
52
+ },
53
+ {
54
+ "accuracy": 0.55126953125,
55
+ "f1": 0.531689530067102,
56
+ "f1_weighted": 0.5318358705956509
57
+ },
58
+ {
59
+ "accuracy": 0.57568359375,
60
+ "f1": 0.5610016498936795,
61
+ "f1_weighted": 0.5611240236147885
62
+ },
63
+ {
64
+ "accuracy": 0.5693359375,
65
+ "f1": 0.5556291057283611,
66
+ "f1_weighted": 0.5557140105673802
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "RuSciBenchGRNTIClassification"
73
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/RuSciBenchGRNTIClusteringP2P.json ADDED
@@ -0,0 +1,34 @@
1
+ {
2
+ "dataset_revision": "673a610d6d3dd91a547a0d57ae1b56f37ebbf6a1",
3
+ "evaluation_time": 17.371254444122314,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.49",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "rus-Cyrl"
12
+ ],
13
+ "main_score": 0.5064389070110268,
14
+ "v_measure": 0.5064389070110268,
15
+ "v_measure_std": 0.01176930168172253,
16
+ "v_measures": {
17
+ "Level 0": [
18
+ 0.49281135173400864,
19
+ 0.5087624779003245,
20
+ 0.5136909867366994,
21
+ 0.5009100506283635,
22
+ 0.5048009718242867,
23
+ 0.504690458821357,
24
+ 0.49977006835570603,
25
+ 0.4951496167190289,
26
+ 0.5369854226640096,
27
+ 0.5068176647264838
28
+ ]
29
+ }
30
+ }
31
+ ]
32
+ },
33
+ "task_name": "RuSciBenchGRNTIClusteringP2P"
34
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/RuSciBenchOECDClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "26c88e99dcaba32bb45d0e1bfc21902337f6d471",
3
+ "evaluation_time": 34.89335060119629,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.49",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.435791015625,
10
+ "f1": 0.41273902380598343,
11
+ "f1_weighted": 0.4127761664424054,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.435791015625,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.43505859375,
20
+ "f1": 0.41096661507062665,
21
+ "f1_weighted": 0.4111559510706326
22
+ },
23
+ {
24
+ "accuracy": 0.43505859375,
25
+ "f1": 0.41408345126889184,
26
+ "f1_weighted": 0.41408062652099953
27
+ },
28
+ {
29
+ "accuracy": 0.43115234375,
30
+ "f1": 0.40832018484041477,
31
+ "f1_weighted": 0.4084260362201456
32
+ },
33
+ {
34
+ "accuracy": 0.42626953125,
35
+ "f1": 0.38849562321523623,
36
+ "f1_weighted": 0.3884183755084082
37
+ },
38
+ {
39
+ "accuracy": 0.42822265625,
40
+ "f1": 0.39580282307473114,
41
+ "f1_weighted": 0.39581339837686746
42
+ },
43
+ {
44
+ "accuracy": 0.435546875,
45
+ "f1": 0.4223818014621926,
46
+ "f1_weighted": 0.4223855899543814
47
+ },
48
+ {
49
+ "accuracy": 0.4345703125,
50
+ "f1": 0.4091195866151694,
51
+ "f1_weighted": 0.4091651075978766
52
+ },
53
+ {
54
+ "accuracy": 0.46240234375,
55
+ "f1": 0.4522984012923603,
56
+ "f1_weighted": 0.4523804593969006
57
+ },
58
+ {
59
+ "accuracy": 0.42431640625,
60
+ "f1": 0.39580476401500836,
61
+ "f1_weighted": 0.39574925816407375
62
+ },
63
+ {
64
+ "accuracy": 0.4453125,
65
+ "f1": 0.43011698720520336,
66
+ "f1_weighted": 0.4301868616137684
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "RuSciBenchOECDClassification"
73
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/RuSciBenchOECDClusteringP2P.json ADDED
@@ -0,0 +1,34 @@
1
+ {
2
+ "dataset_revision": "26c88e99dcaba32bb45d0e1bfc21902337f6d471",
3
+ "evaluation_time": 16.834617853164673,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.49",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "rus-Cyrl"
12
+ ],
13
+ "main_score": 0.444828415881685,
14
+ "v_measure": 0.444828415881685,
15
+ "v_measure_std": 0.011894719691827666,
16
+ "v_measures": {
17
+ "Level 0": [
18
+ 0.4620218825456935,
19
+ 0.437734533114208,
20
+ 0.43300097593964226,
21
+ 0.46843165352909294,
22
+ 0.4351918470474627,
23
+ 0.44788691457226104,
24
+ 0.45046687355966764,
25
+ 0.4313733681737988,
26
+ 0.4448877229368492,
27
+ 0.43728838739817383
28
+ ]
29
+ }
30
+ }
31
+ ]
32
+ },
33
+ "task_name": "RuSciBenchOECDClusteringP2P"
34
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/SIB200Classification.json ADDED
@@ -0,0 +1,201 @@
1
+ {
2
+ "dataset_revision": "a74d7350ea12af010cfb1c21e34f1f81fd2e615b",
3
+ "evaluation_time": 7.555631637573242,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.85",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.7455882352941177,
10
+ "f1": 0.7334352907925354,
11
+ "f1_weighted": 0.7426470873660249,
12
+ "hf_subset": "rus_Cyrl",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.7455882352941177,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.7745098039215687,
20
+ "f1": 0.7583447268150725,
21
+ "f1_weighted": 0.7742233563503075
22
+ },
23
+ {
24
+ "accuracy": 0.7745098039215687,
25
+ "f1": 0.7672498688773552,
26
+ "f1_weighted": 0.7685536354933228
27
+ },
28
+ {
29
+ "accuracy": 0.6568627450980392,
30
+ "f1": 0.6542789505773847,
31
+ "f1_weighted": 0.6492575466804521
32
+ },
33
+ {
34
+ "accuracy": 0.7794117647058824,
35
+ "f1": 0.7628915399055034,
36
+ "f1_weighted": 0.7764559517766063
37
+ },
38
+ {
39
+ "accuracy": 0.7401960784313726,
40
+ "f1": 0.7363059875283949,
41
+ "f1_weighted": 0.7421521173167909
42
+ },
43
+ {
44
+ "accuracy": 0.7254901960784313,
45
+ "f1": 0.7111739057058488,
46
+ "f1_weighted": 0.7259293599115941
47
+ },
48
+ {
49
+ "accuracy": 0.7696078431372549,
50
+ "f1": 0.7541551996669461,
51
+ "f1_weighted": 0.7650143053586376
52
+ },
53
+ {
54
+ "accuracy": 0.75,
55
+ "f1": 0.7386818236166091,
56
+ "f1_weighted": 0.7486902365278683
57
+ },
58
+ {
59
+ "accuracy": 0.7205882352941176,
60
+ "f1": 0.7020466413162334,
61
+ "f1_weighted": 0.7168502506742749
62
+ },
63
+ {
64
+ "accuracy": 0.7647058823529411,
65
+ "f1": 0.7492242639160062,
66
+ "f1_weighted": 0.7593441135703953
67
+ }
68
+ ]
69
+ }
70
+ ],
71
+ "train": [
72
+ {
73
+ "accuracy": 0.7128388017118402,
74
+ "f1": 0.7032586834035689,
75
+ "f1_weighted": 0.711976523151973,
76
+ "hf_subset": "rus_Cyrl",
77
+ "languages": [
78
+ "rus-Cyrl"
79
+ ],
80
+ "main_score": 0.7128388017118402,
81
+ "scores_per_experiment": [
82
+ {
83
+ "accuracy": 0.7417974322396577,
84
+ "f1": 0.7257461818410244,
85
+ "f1_weighted": 0.744192743953624
86
+ },
87
+ {
88
+ "accuracy": 0.7075606276747504,
89
+ "f1": 0.6968734603221395,
90
+ "f1_weighted": 0.7054325794065446
91
+ },
92
+ {
93
+ "accuracy": 0.6861626248216833,
94
+ "f1": 0.6741139653756333,
95
+ "f1_weighted": 0.6832472153163306
96
+ },
97
+ {
98
+ "accuracy": 0.7289586305278174,
99
+ "f1": 0.7140581638015657,
100
+ "f1_weighted": 0.7275261217780306
101
+ },
102
+ {
103
+ "accuracy": 0.7275320970042796,
104
+ "f1": 0.7177610781254736,
105
+ "f1_weighted": 0.729436262854234
106
+ },
107
+ {
108
+ "accuracy": 0.7075606276747504,
109
+ "f1": 0.7089942206508191,
110
+ "f1_weighted": 0.708102933932473
111
+ },
112
+ {
113
+ "accuracy": 0.7146932952924394,
114
+ "f1": 0.7057843178596916,
115
+ "f1_weighted": 0.7104384066769934
116
+ },
117
+ {
118
+ "accuracy": 0.68188302425107,
119
+ "f1": 0.6714389052523281,
120
+ "f1_weighted": 0.682829475923571
121
+ },
122
+ {
123
+ "accuracy": 0.6875891583452212,
124
+ "f1": 0.6818272129474078,
125
+ "f1_weighted": 0.6852515570999035
126
+ },
127
+ {
128
+ "accuracy": 0.7446504992867332,
129
+ "f1": 0.7359893278596059,
130
+ "f1_weighted": 0.7433079345780264
131
+ }
132
+ ]
133
+ }
134
+ ],
135
+ "validation": [
136
+ {
137
+ "accuracy": 0.6919191919191919,
138
+ "f1": 0.6731255834020716,
139
+ "f1_weighted": 0.6939417497065212,
140
+ "hf_subset": "rus_Cyrl",
141
+ "languages": [
142
+ "rus-Cyrl"
143
+ ],
144
+ "main_score": 0.6919191919191919,
145
+ "scores_per_experiment": [
146
+ {
147
+ "accuracy": 0.7373737373737373,
148
+ "f1": 0.7123824337408732,
149
+ "f1_weighted": 0.738885003946901
150
+ },
151
+ {
152
+ "accuracy": 0.696969696969697,
153
+ "f1": 0.6786528286528286,
154
+ "f1_weighted": 0.7025712970157415
155
+ },
156
+ {
157
+ "accuracy": 0.6262626262626263,
158
+ "f1": 0.6239237845684623,
159
+ "f1_weighted": 0.6261606297588307
160
+ },
161
+ {
162
+ "accuracy": 0.7676767676767676,
163
+ "f1": 0.7592857142857142,
164
+ "f1_weighted": 0.7686700336700336
165
+ },
166
+ {
167
+ "accuracy": 0.6464646464646465,
168
+ "f1": 0.6058703236496558,
169
+ "f1_weighted": 0.6447972527042295
170
+ },
171
+ {
172
+ "accuracy": 0.6161616161616161,
173
+ "f1": 0.6175795656988782,
174
+ "f1_weighted": 0.6265529579584179
175
+ },
176
+ {
177
+ "accuracy": 0.6868686868686869,
178
+ "f1": 0.667457455941019,
179
+ "f1_weighted": 0.6962463803821174
180
+ },
181
+ {
182
+ "accuracy": 0.7171717171717171,
183
+ "f1": 0.6891181430223134,
184
+ "f1_weighted": 0.7141753910810941
185
+ },
186
+ {
187
+ "accuracy": 0.696969696969697,
188
+ "f1": 0.6946394655443742,
189
+ "f1_weighted": 0.6996940450808682
190
+ },
191
+ {
192
+ "accuracy": 0.7272727272727273,
193
+ "f1": 0.682346118916597,
194
+ "f1_weighted": 0.7216645054669778
195
+ }
196
+ ]
197
+ }
198
+ ]
199
+ },
200
+ "task_name": "SIB200Classification"
201
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/SIB200ClusteringS2S.json ADDED
@@ -0,0 +1,34 @@
1
+ {
2
+ "dataset_revision": "a74d7350ea12af010cfb1c21e34f1f81fd2e615b",
3
+ "evaluation_time": 6.074451684951782,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.85",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "rus_Cyrl",
10
+ "languages": [
11
+ "rus-Cyrl"
12
+ ],
13
+ "main_score": 0.3716305703369924,
14
+ "v_measure": 0.3716305703369924,
15
+ "v_measure_std": 0.033258710726729514,
16
+ "v_measures": {
17
+ "Level 0": [
18
+ 0.33476101481850945,
19
+ 0.345638265152443,
20
+ 0.4261770327094149,
21
+ 0.39547243736407306,
22
+ 0.3426606688581418,
23
+ 0.34426751456777294,
24
+ 0.3497646798025846,
25
+ 0.3733147898868165,
26
+ 0.3742501135211474,
27
+ 0.4299991866890208
28
+ ]
29
+ }
30
+ }
31
+ ]
32
+ },
33
+ "task_name": "SIB200ClusteringS2S"
34
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/STS22.json ADDED
@@ -0,0 +1,32 @@
1
+ {
2
+ "dataset_revision": "de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3",
3
+ "evaluation_time": 3.586103916168213,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.85",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "cosine_pearson": 0.4066148085995761,
10
+ "cosine_spearman": 0.6289443980931155,
11
+ "euclidean_pearson": 0.4884936945039135,
12
+ "euclidean_spearman": 0.628888476631823,
13
+ "hf_subset": "ru",
14
+ "languages": [
15
+ "rus-Cyrl"
16
+ ],
17
+ "main_score": 0.6289443980931155,
18
+ "manhattan_pearson": 0.4997705873213072,
19
+ "manhattan_spearman": 0.6287171648236973,
20
+ "pearson": [
21
+ 0.4066148023073311,
22
+ 3.2589658265005604e-12
23
+ ],
24
+ "spearman": [
25
+ 0.628888476631823,
26
+ 3.043718148331608e-31
27
+ ]
28
+ }
29
+ ]
30
+ },
31
+ "task_name": "STS22"
32
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/STS22.v2.json ADDED
@@ -0,0 +1,32 @@
1
+ {
2
+ "dataset_revision": "d31f33a128469b20e357535c39b82fb3c3f6f2bd",
3
+ "evaluation_time": 3.7387850284576416,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.85",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "cosine_pearson": 0.4066148085995761,
10
+ "cosine_spearman": 0.6289443980931155,
11
+ "euclidean_pearson": 0.4884936945039135,
12
+ "euclidean_spearman": 0.628888476631823,
13
+ "hf_subset": "ru",
14
+ "languages": [
15
+ "rus-Cyrl"
16
+ ],
17
+ "main_score": 0.6289443980931155,
18
+ "manhattan_pearson": 0.4997705873213072,
19
+ "manhattan_spearman": 0.6287171648236973,
20
+ "pearson": [
21
+ 0.4066148023073311,
22
+ 3.2589658265005604e-12
23
+ ],
24
+ "spearman": [
25
+ 0.628888476631823,
26
+ 3.043718148331608e-31
27
+ ]
28
+ }
29
+ ]
30
+ },
31
+ "task_name": "STS22.v2"
32
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/STSBenchmarkMultilingualSTS.json ADDED
@@ -0,0 +1,55 @@
1
+ {
2
+ "dataset_revision": "29afa2569dcedaaa2fe6a3dcfebab33d28b82e8c",
3
+ "evaluation_time": 6.7084801197052,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.85",
6
+ "scores": {
7
+ "dev": [
8
+ {
9
+ "cosine_pearson": 0.8632721888088014,
10
+ "cosine_spearman": 0.8640212589115892,
11
+ "euclidean_pearson": 0.8534970378188562,
12
+ "euclidean_spearman": 0.8640233107472571,
13
+ "hf_subset": "ru",
14
+ "languages": [
15
+ "rus-Cyrl"
16
+ ],
17
+ "main_score": 0.8640212589115892,
18
+ "manhattan_pearson": 0.8539982856774622,
19
+ "manhattan_spearman": 0.8646553265931135,
20
+ "pearson": [
21
+ 0.8632721748631313,
22
+ 0.0
23
+ ],
24
+ "spearman": [
25
+ 0.8640251355266284,
26
+ 0.0
27
+ ]
28
+ }
29
+ ],
30
+ "test": [
31
+ {
32
+ "cosine_pearson": 0.8246691331479675,
33
+ "cosine_spearman": 0.8181486575322936,
34
+ "euclidean_pearson": 0.8131852427368632,
35
+ "euclidean_spearman": 0.8181395198814811,
36
+ "hf_subset": "ru",
37
+ "languages": [
38
+ "rus-Cyrl"
39
+ ],
40
+ "main_score": 0.8181486575322936,
41
+ "manhattan_pearson": 0.8130736909141751,
42
+ "manhattan_spearman": 0.8178538376191475,
43
+ "pearson": [
44
+ 0.8246691362157792,
45
+ 0.0
46
+ ],
47
+ "spearman": [
48
+ 0.8181505147253395,
49
+ 0.0
50
+ ]
51
+ }
52
+ ]
53
+ },
54
+ "task_name": "STSBenchmarkMultilingualSTS"
55
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/SensitiveTopicsClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "416b34a802308eac30e4192afc0ff99bb8dcc7f2",
3
+ "evaluation_time": 7.074731111526489,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.49",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.275244140625,
10
+ "f1": 0.29150075941334463,
11
+ "hf_subset": "default",
12
+ "languages": [
13
+ "rus-Cyrl"
14
+ ],
15
+ "lrap": 0.4139275444878396,
16
+ "main_score": 0.275244140625,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.29052734375,
20
+ "f1": 0.26712103086826444,
21
+ "lrap": 0.4333631727430479
22
+ },
23
+ {
24
+ "accuracy": 0.263671875,
25
+ "f1": 0.2975992837613153,
26
+ "lrap": 0.39885118272568665
27
+ },
28
+ {
29
+ "accuracy": 0.244140625,
30
+ "f1": 0.2615599599659626,
31
+ "lrap": 0.3748914930555482
32
+ },
33
+ {
34
+ "accuracy": 0.27685546875,
35
+ "f1": 0.3068034525405748,
36
+ "lrap": 0.4089287651909646
37
+ },
38
+ {
39
+ "accuracy": 0.26416015625,
40
+ "f1": 0.26058632149841443,
41
+ "lrap": 0.3940429687499923
42
+ },
43
+ {
44
+ "accuracy": 0.2939453125,
45
+ "f1": 0.32408498616323694,
46
+ "lrap": 0.444132486979159
47
+ },
48
+ {
49
+ "accuracy": 0.28125,
50
+ "f1": 0.3043677911005396,
51
+ "lrap": 0.440470377604159
52
+ },
53
+ {
54
+ "accuracy": 0.2763671875,
55
+ "f1": 0.2935574746912606,
56
+ "lrap": 0.4099460177951309
57
+ },
58
+ {
59
+ "accuracy": 0.27099609375,
60
+ "f1": 0.3072664138257324,
61
+ "lrap": 0.41335720486110344
62
+ },
63
+ {
64
+ "accuracy": 0.29052734375,
65
+ "f1": 0.2920608797181457,
66
+ "lrap": 0.42129177517360356
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "SensitiveTopicsClassification"
73
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/TERRa.json ADDED
@@ -0,0 +1,68 @@
1
+ {
2
+ "dataset_revision": "7b58f24536063837d644aab9a023c62199b2a612",
3
+ "evaluation_time": 0.6824915409088135,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.49",
6
+ "scores": {
7
+ "dev": [
8
+ {
9
+ "cosine": {
10
+ "accuracy": 0.5928338762214984,
11
+ "accuracy_threshold": 0.826184868812561,
12
+ "ap": 0.5781343526166172,
13
+ "f1": 0.669603524229075,
14
+ "f1_threshold": 0.6166716814041138,
15
+ "precision": 0.5049833887043189,
16
+ "recall": 0.9934640522875817
17
+ },
18
+ "dot": {
19
+ "accuracy": 0.5928338762214984,
20
+ "accuracy_threshold": 0.826184868812561,
21
+ "ap": 0.5781343526166172,
22
+ "f1": 0.669603524229075,
23
+ "f1_threshold": 0.6166718006134033,
24
+ "precision": 0.5049833887043189,
25
+ "recall": 0.9934640522875817
26
+ },
27
+ "euclidean": {
28
+ "accuracy": 0.5928338762214984,
29
+ "accuracy_threshold": 0.5896016955375671,
30
+ "ap": 0.5781343526166172,
31
+ "f1": 0.669603524229075,
32
+ "f1_threshold": 0.8755891919136047,
33
+ "precision": 0.5049833887043189,
34
+ "recall": 0.9934640522875817
35
+ },
36
+ "hf_subset": "default",
37
+ "languages": [
38
+ "rus-Cyrl"
39
+ ],
40
+ "main_score": 0.5781343526166172,
41
+ "manhattan": {
42
+ "accuracy": 0.5928338762214984,
43
+ "accuracy_threshold": 13.14597225189209,
44
+ "ap": 0.5766665355054674,
45
+ "f1": 0.669603524229075,
46
+ "f1_threshold": 19.16716957092285,
47
+ "precision": 0.5049833887043189,
48
+ "recall": 0.9934640522875817
49
+ },
50
+ "max": {
51
+ "accuracy": 0.5928338762214984,
52
+ "ap": 0.5781343526166172,
53
+ "f1": 0.669603524229075
54
+ },
55
+ "similarity": {
56
+ "accuracy": 0.5928338762214984,
57
+ "accuracy_threshold": 0.826184868812561,
58
+ "ap": 0.5781343526166172,
59
+ "f1": 0.669603524229075,
60
+ "f1_threshold": 0.6166719198226929,
61
+ "precision": 0.5049833887043189,
62
+ "recall": 0.9934640522875817
63
+ }
64
+ }
65
+ ]
66
+ },
67
+ "task_name": "TERRa"
68
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/Tatoeba.json ADDED
@@ -0,0 +1,23 @@
1
+ {
2
+ "dataset_revision": "69e8f12da6e31d59addadda9a9c8a2e601a0e282",
3
+ "evaluation_time": 0.7637653350830078,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.85",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.948,
10
+ "f1": 0.9321666666666666,
11
+ "hf_subset": "rus-eng",
12
+ "languages": [
13
+ "rus-Cyrl",
14
+ "eng-Latn"
15
+ ],
16
+ "main_score": 0.9321666666666666,
17
+ "precision": 0.9243333333333332,
18
+ "recall": 0.948
19
+ }
20
+ ]
21
+ },
22
+ "task_name": "Tatoeba"
23
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/XNLI.json ADDED
@@ -0,0 +1,105 @@
1
+ {
2
+ "dataset_revision": "09698e0180d87dc247ca447d3a1248b931ac0cdb",
3
+ "evaluation_time": 6.922283887863159,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.85",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "cosine_accuracy": 0.7091575091575092,
10
+ "cosine_accuracy_threshold": 0.7832814455032349,
11
+ "cosine_ap": 0.7709500397739182,
12
+ "cosine_f1": 0.7359364659166115,
13
+ "cosine_f1_threshold": 0.7531269788742065,
14
+ "cosine_precision": 0.6706875753920386,
15
+ "cosine_recall": 0.8152492668621701,
16
+ "dot_accuracy": 0.7091575091575092,
17
+ "dot_accuracy_threshold": 0.7832813262939453,
18
+ "dot_ap": 0.7709500397739182,
19
+ "dot_f1": 0.7359364659166115,
20
+ "dot_f1_threshold": 0.7531269788742065,
21
+ "dot_precision": 0.6706875753920386,
22
+ "dot_recall": 0.8152492668621701,
23
+ "euclidean_accuracy": 0.7091575091575092,
24
+ "euclidean_accuracy_threshold": 0.6583592891693115,
25
+ "euclidean_ap": 0.7709500397739182,
26
+ "euclidean_f1": 0.7359364659166115,
27
+ "euclidean_f1_threshold": 0.7026705741882324,
28
+ "euclidean_precision": 0.6706875753920386,
29
+ "euclidean_recall": 0.8152492668621701,
30
+ "hf_subset": "ru",
31
+ "languages": [
32
+ "rus-Cyrl"
33
+ ],
34
+ "main_score": 0.7709500397739182,
35
+ "manhattan_accuracy": 0.7040293040293041,
36
+ "manhattan_accuracy_threshold": 15.276962280273438,
37
+ "manhattan_ap": 0.7682522549831512,
38
+ "manhattan_f1": 0.7317397078353253,
39
+ "manhattan_f1_threshold": 15.276962280273438,
40
+ "manhattan_precision": 0.6686893203883495,
41
+ "manhattan_recall": 0.8079178885630498,
42
+ "max_ap": 0.7709500397739182,
43
+ "max_f1": 0.7359364659166115,
44
+ "max_precision": 0.6706875753920386,
45
+ "max_recall": 0.8152492668621701,
46
+ "similarity_accuracy": 0.7091575091575092,
47
+ "similarity_accuracy_threshold": 0.7832814455032349,
48
+ "similarity_ap": 0.7709500397739182,
49
+ "similarity_f1": 0.7359364659166115,
50
+ "similarity_f1_threshold": 0.7531269788742065,
51
+ "similarity_precision": 0.6706875753920386,
52
+ "similarity_recall": 0.8152492668621701
53
+ }
54
+ ],
55
+ "validation": [
56
+ {
57
+ "cosine_accuracy": 0.7201465201465201,
58
+ "cosine_accuracy_threshold": 0.7796648740768433,
59
+ "cosine_ap": 0.7832647958326411,
60
+ "cosine_f1": 0.7314578005115089,
61
+ "cosine_f1_threshold": 0.7443016767501831,
62
+ "cosine_precision": 0.6485260770975056,
63
+ "cosine_recall": 0.8387096774193549,
64
+ "dot_accuracy": 0.7201465201465201,
65
+ "dot_accuracy_threshold": 0.7796648740768433,
66
+ "dot_ap": 0.7832647958326411,
67
+ "dot_f1": 0.7314578005115089,
68
+ "dot_f1_threshold": 0.7443017363548279,
69
+ "dot_precision": 0.6485260770975056,
70
+ "dot_recall": 0.8387096774193549,
71
+ "euclidean_accuracy": 0.7201465201465201,
72
+ "euclidean_accuracy_threshold": 0.6638299822807312,
73
+ "euclidean_ap": 0.7832647958326411,
74
+ "euclidean_f1": 0.7314578005115089,
75
+ "euclidean_f1_threshold": 0.7151200175285339,
76
+ "euclidean_precision": 0.6485260770975056,
77
+ "euclidean_recall": 0.8387096774193549,
78
+ "hf_subset": "ru",
79
+ "languages": [
80
+ "rus-Cyrl"
81
+ ],
82
+ "main_score": 0.7836372866830557,
83
+ "manhattan_accuracy": 0.7201465201465201,
84
+ "manhattan_accuracy_threshold": 14.288792610168457,
85
+ "manhattan_ap": 0.7836372866830557,
86
+ "manhattan_f1": 0.7294716740929345,
87
+ "manhattan_f1_threshold": 15.687447547912598,
88
+ "manhattan_precision": 0.6445444319460067,
89
+ "manhattan_recall": 0.8401759530791789,
90
+ "max_ap": 0.7836372866830557,
91
+ "max_f1": 0.7314578005115089,
92
+ "max_precision": 0.6485260770975056,
93
+ "max_recall": 0.8401759530791789,
94
+ "similarity_accuracy": 0.7201465201465201,
95
+ "similarity_accuracy_threshold": 0.7796648740768433,
96
+ "similarity_ap": 0.7832647958326411,
97
+ "similarity_f1": 0.7314578005115089,
98
+ "similarity_f1_threshold": 0.7443017363548279,
99
+ "similarity_precision": 0.6485260770975056,
100
+ "similarity_recall": 0.8387096774193549
101
+ }
102
+ ]
103
+ },
104
+ "task_name": "XNLI"
105
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/XNLIV2.json ADDED
@@ -0,0 +1,57 @@
1
+ {
2
+ "dataset_revision": "5b7d477a8c62cdd18e2fed7e015497c20b4371ad",
3
+ "evaluation_time": 3.7518317699432373,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.85",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "cosine_accuracy": 0.7142857142857143,
10
+ "cosine_accuracy_threshold": 0.7936503887176514,
11
+ "cosine_ap": 0.7759057952260282,
12
+ "cosine_f1": 0.737249838605552,
13
+ "cosine_f1_threshold": 0.7610313296318054,
14
+ "cosine_precision": 0.6585928489042676,
15
+ "cosine_recall": 0.8372434017595308,
16
+ "dot_accuracy": 0.7142857142857143,
17
+ "dot_accuracy_threshold": 0.7936503887176514,
18
+ "dot_ap": 0.7759057952260282,
19
+ "dot_f1": 0.737249838605552,
20
+ "dot_f1_threshold": 0.7610312700271606,
21
+ "dot_precision": 0.6585928489042676,
22
+ "dot_recall": 0.8372434017595308,
23
+ "euclidean_accuracy": 0.7142857142857143,
24
+ "euclidean_accuracy_threshold": 0.6424167156219482,
25
+ "euclidean_ap": 0.7759057952260282,
26
+ "euclidean_f1": 0.737249838605552,
27
+ "euclidean_f1_threshold": 0.691330075263977,
28
+ "euclidean_precision": 0.6585928489042676,
29
+ "euclidean_recall": 0.8372434017595308,
30
+ "hf_subset": "russian",
31
+ "languages": [
32
+ "rus-Cyrl"
33
+ ],
34
+ "main_score": 0.7759057952260282,
35
+ "manhattan_accuracy": 0.7135531135531136,
36
+ "manhattan_accuracy_threshold": 14.266500473022461,
37
+ "manhattan_ap": 0.7752490582883251,
38
+ "manhattan_f1": 0.7367724867724867,
39
+ "manhattan_f1_threshold": 14.916580200195312,
40
+ "manhattan_precision": 0.6710843373493975,
41
+ "manhattan_recall": 0.8167155425219942,
42
+ "max_ap": 0.7759057952260282,
43
+ "max_f1": 0.737249838605552,
44
+ "max_precision": 0.6710843373493975,
45
+ "max_recall": 0.8372434017595308,
46
+ "similarity_accuracy": 0.7142857142857143,
47
+ "similarity_accuracy_threshold": 0.7936502695083618,
48
+ "similarity_ap": 0.7759057952260282,
49
+ "similarity_f1": 0.737249838605552,
50
+ "similarity_f1_threshold": 0.761031448841095,
51
+ "similarity_precision": 0.6585928489042676,
52
+ "similarity_recall": 0.8372434017595308
53
+ }
54
+ ]
55
+ },
56
+ "task_name": "XNLIV2"
57
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/XQuADRetrieval.json ADDED
@@ -0,0 +1,158 @@
1
+ {
2
+ "dataset_revision": "51adfef1c1287aab1d2d91b5bead9bcfb9c68583",
3
+ "evaluation_time": 3.288357734680176,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.85",
6
+ "scores": {
7
+ "validation": [
8
+ {
9
+ "hf_subset": "ru",
10
+ "languages": [
11
+ "rus-Cyrl"
12
+ ],
13
+ "main_score": 0.94245,
14
+ "map_at_1": 0.89367,
15
+ "map_at_10": 0.92756,
16
+ "map_at_100": 0.92824,
17
+ "map_at_1000": 0.92824,
18
+ "map_at_20": 0.92803,
19
+ "map_at_3": 0.92166,
20
+ "map_at_5": 0.92584,
21
+ "mrr_at_1": 0.8936708860759494,
22
+ "mrr_at_10": 0.9275647980711272,
23
+ "mrr_at_100": 0.9282380679451204,
24
+ "mrr_at_1000": 0.9282380679451204,
25
+ "mrr_at_20": 0.9280263758744772,
26
+ "mrr_at_3": 0.9216596343178622,
27
+ "mrr_at_5": 0.9258368495077359,
28
+ "nauc_map_at_1000_diff1": 0.8963508401623788,
29
+ "nauc_map_at_1000_max": 0.6751253729245389,
30
+ "nauc_map_at_1000_std": 0.2922080823763396,
31
+ "nauc_map_at_100_diff1": 0.8963508401623788,
32
+ "nauc_map_at_100_max": 0.6751253729245389,
33
+ "nauc_map_at_100_std": 0.2922080823763396,
34
+ "nauc_map_at_10_diff1": 0.8967237832713173,
35
+ "nauc_map_at_10_max": 0.6778172107527979,
36
+ "nauc_map_at_10_std": 0.29676795180610366,
37
+ "nauc_map_at_1_diff1": 0.8999968963191546,
38
+ "nauc_map_at_1_max": 0.6451620307877981,
39
+ "nauc_map_at_1_std": 0.256254202432371,
40
+ "nauc_map_at_20_diff1": 0.896361313401608,
41
+ "nauc_map_at_20_max": 0.6758783905966789,
42
+ "nauc_map_at_20_std": 0.29317083091078117,
43
+ "nauc_map_at_3_diff1": 0.8964309879756237,
44
+ "nauc_map_at_3_max": 0.6739180486194329,
45
+ "nauc_map_at_3_std": 0.289770472577566,
46
+ "nauc_map_at_5_diff1": 0.8961598478131729,
47
+ "nauc_map_at_5_max": 0.6830189090354628,
48
+ "nauc_map_at_5_std": 0.29780893563339006,
49
+ "nauc_mrr_at_1000_diff1": 0.8963508401623788,
50
+ "nauc_mrr_at_1000_max": 0.6751253729245389,
51
+ "nauc_mrr_at_1000_std": 0.2922080823763396,
52
+ "nauc_mrr_at_100_diff1": 0.8963508401623788,
53
+ "nauc_mrr_at_100_max": 0.6751253729245389,
54
+ "nauc_mrr_at_100_std": 0.2922080823763396,
55
+ "nauc_mrr_at_10_diff1": 0.8967237832713173,
56
+ "nauc_mrr_at_10_max": 0.6778172107527979,
57
+ "nauc_mrr_at_10_std": 0.29676795180610366,
58
+ "nauc_mrr_at_1_diff1": 0.8999968963191546,
59
+ "nauc_mrr_at_1_max": 0.6451620307877981,
60
+ "nauc_mrr_at_1_std": 0.256254202432371,
61
+ "nauc_mrr_at_20_diff1": 0.896361313401608,
62
+ "nauc_mrr_at_20_max": 0.6758783905966789,
63
+ "nauc_mrr_at_20_std": 0.29317083091078117,
64
+ "nauc_mrr_at_3_diff1": 0.8964309879756237,
65
+ "nauc_mrr_at_3_max": 0.6739180486194329,
66
+ "nauc_mrr_at_3_std": 0.289770472577566,
67
+ "nauc_mrr_at_5_diff1": 0.8961598478131729,
68
+ "nauc_mrr_at_5_max": 0.6830189090354628,
69
+ "nauc_mrr_at_5_std": 0.29780893563339006,
70
+ "nauc_ndcg_at_1000_diff1": 0.8961186325219601,
71
+ "nauc_ndcg_at_1000_max": 0.6801931349393009,
72
+ "nauc_ndcg_at_1000_std": 0.29943843250279817,
73
+ "nauc_ndcg_at_100_diff1": 0.8961186325219601,
74
+ "nauc_ndcg_at_100_max": 0.6801931349393009,
75
+ "nauc_ndcg_at_100_std": 0.29943843250279817,
76
+ "nauc_ndcg_at_10_diff1": 0.8972603199267384,
77
+ "nauc_ndcg_at_10_max": 0.6935509207628758,
78
+ "nauc_ndcg_at_10_std": 0.32226540598349396,
79
+ "nauc_ndcg_at_1_diff1": 0.8999968963191546,
80
+ "nauc_ndcg_at_1_max": 0.6451620307877981,
81
+ "nauc_ndcg_at_1_std": 0.256254202432371,
82
+ "nauc_ndcg_at_20_diff1": 0.8959001339429841,
83
+ "nauc_ndcg_at_20_max": 0.6855391298380723,
84
+ "nauc_ndcg_at_20_std": 0.30703372052376493,
85
+ "nauc_ndcg_at_3_diff1": 0.8964526400850794,
86
+ "nauc_ndcg_at_3_max": 0.6867021010165624,
87
+ "nauc_ndcg_at_3_std": 0.3066532919483033,
88
+ "nauc_ndcg_at_5_diff1": 0.8958898354363005,
89
+ "nauc_ndcg_at_5_max": 0.7080698661179543,
90
+ "nauc_ndcg_at_5_std": 0.3255636299587615,
91
+ "nauc_precision_at_1000_diff1": 1.0,
92
+ "nauc_precision_at_1000_max": 1.0,
93
+ "nauc_precision_at_1000_std": 1.0,
94
+ "nauc_precision_at_100_diff1": 1.0,
95
+ "nauc_precision_at_100_max": 1.0,
96
+ "nauc_precision_at_100_std": 1.0,
97
+ "nauc_precision_at_10_diff1": 0.9109833975227343,
98
+ "nauc_precision_at_10_max": 0.9533365361139724,
99
+ "nauc_precision_at_10_std": 0.7690042637847995,
100
+ "nauc_precision_at_1_diff1": 0.8999968963191546,
101
+ "nauc_precision_at_1_max": 0.6451620307877981,
102
+ "nauc_precision_at_1_std": 0.256254202432371,
103
+ "nauc_precision_at_20_diff1": 0.8803059695156299,
104
+ "nauc_precision_at_20_max": 0.9253384577823769,
105
+ "nauc_precision_at_20_std": 0.6633027093911611,
106
+ "nauc_precision_at_3_diff1": 0.8974468324697066,
107
+ "nauc_precision_at_3_max": 0.7528970375541977,
108
+ "nauc_precision_at_3_std": 0.3951595879982966,
109
+ "nauc_precision_at_5_diff1": 0.8949000719246943,
110
+ "nauc_precision_at_5_max": 0.9337130736621487,
111
+ "nauc_precision_at_5_std": 0.5741400310849488,
112
+ "nauc_recall_at_1000_diff1": NaN,
113
+ "nauc_recall_at_1000_max": NaN,
114
+ "nauc_recall_at_1000_std": NaN,
115
+ "nauc_recall_at_100_diff1": NaN,
116
+ "nauc_recall_at_100_max": NaN,
117
+ "nauc_recall_at_100_std": NaN,
118
+ "nauc_recall_at_10_diff1": 0.9109833975227452,
119
+ "nauc_recall_at_10_max": 0.9533365361139833,
120
+ "nauc_recall_at_10_std": 0.7690042637847961,
121
+ "nauc_recall_at_1_diff1": 0.8999968963191546,
122
+ "nauc_recall_at_1_max": 0.6451620307877981,
123
+ "nauc_recall_at_1_std": 0.256254202432371,
124
+ "nauc_recall_at_20_diff1": 0.8803059695156423,
125
+ "nauc_recall_at_20_max": 0.9253384577823777,
126
+ "nauc_recall_at_20_std": 0.663302709391187,
127
+ "nauc_recall_at_3_diff1": 0.8974468324697096,
128
+ "nauc_recall_at_3_max": 0.7528970375541931,
129
+ "nauc_recall_at_3_std": 0.39515958799829815,
130
+ "nauc_recall_at_5_diff1": 0.8949000719246955,
131
+ "nauc_recall_at_5_max": 0.9337130736621473,
132
+ "nauc_recall_at_5_std": 0.5741400310849483,
133
+ "ndcg_at_1": 0.89367,
134
+ "ndcg_at_10": 0.94245,
135
+ "ndcg_at_100": 0.94522,
136
+ "ndcg_at_1000": 0.94522,
137
+ "ndcg_at_20": 0.94401,
138
+ "ndcg_at_3": 0.93073,
139
+ "ndcg_at_5": 0.93832,
140
+ "precision_at_1": 0.89367,
141
+ "precision_at_10": 0.09882,
142
+ "precision_at_100": 0.01,
143
+ "precision_at_1000": 0.001,
144
+ "precision_at_20": 0.0497,
145
+ "precision_at_3": 0.31899,
146
+ "precision_at_5": 0.19511,
147
+ "recall_at_1": 0.89367,
148
+ "recall_at_10": 0.98819,
149
+ "recall_at_100": 1.0,
150
+ "recall_at_1000": 1.0,
151
+ "recall_at_20": 0.99409,
152
+ "recall_at_3": 0.95696,
153
+ "recall_at_5": 0.97553
154
+ }
155
+ ]
156
+ },
157
+ "task_name": "XQuADRetrieval"
158
+ }
results/LaBSE-ru-turbo/1940b046c6b5e125df11722b899130329d0a46da/model_meta.json ADDED
@@ -0,0 +1 @@
1
+ {"name": "sergeyzh/LaBSE-ru-turbo", "revision": "1940b046c6b5e125df11722b899130329d0a46da", "release_date": "2024-06-27", "languages": ["rus_Cyrl"], "n_parameters": null, "memory_usage": null, "max_tokens": null, "embed_dim": null, "license": null, "open_source": true, "similarity_fn_name": null, "framework": [], "loader": null}
results/LaBSE/e34fab64a3011d2176c99545a93d5cbddc9a91b7/GPUSpeedTask.json ADDED
@@ -0,0 +1,53 @@
1
+ {
2
+ "dataset_revision": "1.0",
3
+ "evaluation_time": 1.251234769821167,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.49",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "architecture": "x86_64",
10
+ "avg_words_per_sec": 21264.25362721162,
11
+ "gpu_info": [
12
+ {
13
+ "gpu_name": "Quadro RTX 8000",
14
+ "gpu_total_memory": "48.0 GB"
15
+ },
16
+ {
17
+ "gpu_name": "Quadro RTX 8000",
18
+ "gpu_total_memory": "48.0 GB"
19
+ },
20
+ {
21
+ "gpu_name": "Quadro RTX 8000",
22
+ "gpu_total_memory": "48.0 GB"
23
+ }
24
+ ],
25
+ "hf_subset": "default",
26
+ "languages": [
27
+ "eng-Latn"
28
+ ],
29
+ "main_score": 21264.25362721162,
30
+ "num_gpus": 3,
31
+ "physical_cores": 36,
32
+ "platform": "Linux",
33
+ "platform_release": "4.15.0-213-generic",
34
+ "platform_version": "#224-Ubuntu SMP Mon Jun 19 13:30:12 UTC 2023",
35
+ "processor": "x86_64",
36
+ "ram": "504 GB",
37
+ "time_mean": 0.1708501066480364,
38
+ "time_std": 0.026567902896011263,
39
+ "timings": [
40
+ 0.2358858585357666,
41
+ 0.16089916229248047,
42
+ 0.1604166030883789,
43
+ 0.16066360473632812,
44
+ 0.16028976440429688,
45
+ 0.1577913761138916,
46
+ 0.1600043773651123
47
+ ],
48
+ "total_cores": 72
49
+ }
50
+ ]
51
+ },
52
+ "task_name": "GPUSpeedTask"
53
+ }
results/LaBSE/e34fab64a3011d2176c99545a93d5cbddc9a91b7/LanguageClassification.json ADDED
@@ -0,0 +1,92 @@
1
+ {
2
+ "dataset_revision": "aa56583bf2bc52b0565770607d6fc3faebecf9e2",
3
+ "evaluation_time": 63.60920691490173,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.89",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.203466796875,
10
+ "f1": 0.18986092893427242,
11
+ "f1_weighted": 0.18994521375727724,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "ara-Arab",
15
+ "bul-Cyrl",
16
+ "deu-Latn",
17
+ "ell-Grek",
18
+ "eng-Latn",
19
+ "spa-Latn",
20
+ "fra-Latn",
21
+ "hin-Deva",
22
+ "ita-Latn",
23
+ "jpn-Jpan",
24
+ "nld-Latn",
25
+ "pol-Latn",
26
+ "por-Latn",
27
+ "rus-Cyrl",
28
+ "swa-Latn",
29
+ "tha-Thai",
30
+ "tur-Latn",
31
+ "urd-Arab",
32
+ "vie-Latn",
33
+ "cmn-Hans"
34
+ ],
35
+ "main_score": 0.203466796875,
36
+ "scores_per_experiment": [
37
+ {
38
+ "accuracy": 0.1875,
39
+ "f1": 0.1754231978439386,
40
+ "f1_weighted": 0.17552455400636058
41
+ },
42
+ {
43
+ "accuracy": 0.19970703125,
44
+ "f1": 0.18705088050880767,
45
+ "f1_weighted": 0.18720765777479115
46
+ },
47
+ {
48
+ "accuracy": 0.2236328125,
49
+ "f1": 0.206453715263396,
50
+ "f1_weighted": 0.2065588921279185
51
+ },
52
+ {
53
+ "accuracy": 0.19873046875,
54
+ "f1": 0.18752174193130244,
55
+ "f1_weighted": 0.18759353262489273
56
+ },
57
+ {
58
+ "accuracy": 0.19775390625,
59
+ "f1": 0.18333077402533657,
60
+ "f1_weighted": 0.18345094855195762
61
+ },
62
+ {
63
+ "accuracy": 0.21826171875,
64
+ "f1": 0.20437450548200137,
65
+ "f1_weighted": 0.20449305053610223
66
+ },
67
+ {
68
+ "accuracy": 0.19970703125,
69
+ "f1": 0.18723713082847426,
70
+ "f1_weighted": 0.1872284444982756
71
+ },
72
+ {
73
+ "accuracy": 0.20458984375,
74
+ "f1": 0.19343110339127012,
75
+ "f1_weighted": 0.19351811720741663
76
+ },
77
+ {
78
+ "accuracy": 0.20556640625,
79
+ "f1": 0.19125254151952373,
80
+ "f1_weighted": 0.1913390886356093
81
+ },
82
+ {
83
+ "accuracy": 0.19921875,
84
+ "f1": 0.18253369854867338,
85
+ "f1_weighted": 0.18253785160944827
86
+ }
87
+ ]
88
+ }
89
+ ]
90
+ },
91
+ "task_name": "LanguageClassification"
92
+ }
results/LaBSE/e34fab64a3011d2176c99545a93d5cbddc9a91b7/MLSUMClusteringP2P.json ADDED
@@ -0,0 +1,55 @@
1
+ {
2
+ "dataset_revision": "b5d54f8f3b61ae17845046286940f03c6bc79bc7",
3
+ "evaluation_time": 15.01644515991211,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.12.89",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "ru",
10
+ "languages": [
11
+ "rus-Cyrl"
12
+ ],
13
+ "main_score": 0.3944833816221668,
14
+ "v_measure": 0.3944833816221668,
15
+ "v_measure_std": 0.052120220246551496,
16
+ "v_measures": [
17
+ 0.4466001456129652,
18
+ 0.44600850092012306,
19
+ 0.38375379723280495,
20
+ 0.31369002834150056,
21
+ 0.3371789914999591,
22
+ 0.435981012248182,
23
+ 0.4817689462989566,
24
+ 0.3570899599093,
25
+ 0.36585019610231717,
26
+ 0.3769122380555593
27
+ ]
28
+ }
29
+ ],
30
+ "validation": [
31
+ {
32
+ "hf_subset": "ru",
33
+ "languages": [
34
+ "rus-Cyrl"
35
+ ],
36
+ "main_score": 0.3716638443186714,
37
+ "v_measure": 0.3716638443186714,
38
+ "v_measure_std": 0.035223383346560895,
39
+ "v_measures": [
40
+ 0.41892108167362035,
41
+ 0.37711425960140904,
42
+ 0.3233577377958012,
43
+ 0.31255477841457374,
44
+ 0.4043770371058953,
45
+ 0.3692782031077736,
46
+ 0.41389528025889694,
47
+ 0.33722424374696275,
48
+ 0.37178647806075626,
49
+ 0.38812934342102445
50
+ ]
51
+ }
52
+ ]
53
+ },
54
+ "task_name": "MLSUMClusteringP2P"
55
+ }