Datasets: mteb /
vatolinalex committed (unverified)
Commit 6fbbd2e · Parent: 71d8e5f

Add RuMTEB evaluation scores for most of the top English MTEB models (#76)
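The files below are standard per-task MTEB result JSONs written by an evaluation run. A minimal sketch of the kind of run that produces them (assuming mteb 1.19.x and a sentence-transformers-compatible model; the loading flags and task selection here are illustrative, not taken from this PR):

```python
import mteb
from sentence_transformers import SentenceTransformer

# One of the models covered by this commit; gte-Qwen checkpoints typically
# need trust_remote_code (assumption, not stated in this PR).
model = SentenceTransformer("Alibaba-NLP/gte-Qwen1.5-7B-instruct", trust_remote_code=True)

# A few of the RuMTEB tasks whose scores are added below (subset for brevity).
tasks = mteb.get_tasks(tasks=["CEDRClassification", "GeoreviewClassification", "RuBQRetrieval"])

evaluation = mteb.MTEB(tasks=tasks)
# Writes results/<org__model>/<model_revision>/<TaskName>.json,
# the same layout as the files added in this commit.
evaluation.run(model, output_folder="results")
```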

This view is limited to 50 files because the commit contains too many changes (see the raw diff for the full set).
Files changed (50) — a short sketch for reading these result files follows the list:
  1. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/CEDRClassification.json +73 -0
  2. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/GeoreviewClassification.json +73 -0
  3. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/GeoreviewClusteringP2P.json +34 -0
  4. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/HeadlineClassification.json +73 -0
  5. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/InappropriatenessClassification.json +95 -0
  6. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/KinopoiskClassification.json +73 -0
  7. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/MIRACLReranking.json +130 -0
  8. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/MassiveIntentClassification.json +137 -0
  9. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/MassiveScenarioClassification.json +137 -0
  10. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/RUParaPhraserSTS.json +26 -0
  11. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/RiaNewsRetrieval.json +158 -0
  12. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/RuBQReranking.json +26 -0
  13. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/RuBQRetrieval.json +158 -0
  14. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/RuReviewsClassification.json +73 -0
  15. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/RuSTSBenchmarkSTS.json +26 -0
  16. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/RuSciBenchGRNTIClassification.json +73 -0
  17. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/RuSciBenchGRNTIClusteringP2P.json +34 -0
  18. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/RuSciBenchOECDClassification.json +73 -0
  19. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/RuSciBenchOECDClusteringP2P.json +34 -0
  20. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/STS22.json +26 -0
  21. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/SensitiveTopicsClassification.json +73 -0
  22. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/TERRa.json +58 -0
  23. results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/model_meta.json +1 -0
  24. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/CEDRClassification.json +73 -0
  25. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/GeoreviewClassification.json +73 -0
  26. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/GeoreviewClusteringP2P.json +34 -0
  27. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/HeadlineClassification.json +73 -0
  28. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/InappropriatenessClassification.json +95 -0
  29. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/KinopoiskClassification.json +73 -0
  30. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/MIRACLReranking.json +130 -0
  31. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/MassiveIntentClassification.json +137 -0
  32. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/MassiveScenarioClassification.json +137 -0
  33. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/RUParaPhraserSTS.json +26 -0
  34. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/RiaNewsRetrieval.json +158 -0
  35. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/RuBQReranking.json +26 -0
  36. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/RuBQRetrieval.json +158 -0
  37. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/RuReviewsClassification.json +73 -0
  38. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/RuSTSBenchmarkSTS.json +26 -0
  39. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/RuSciBenchGRNTIClassification.json +73 -0
  40. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/RuSciBenchGRNTIClusteringP2P.json +34 -0
  41. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/RuSciBenchOECDClassification.json +73 -0
  42. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/RuSciBenchOECDClusteringP2P.json +34 -0
  43. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/STS22.json +26 -0
  44. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/SensitiveTopicsClassification.json +73 -0
  45. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/TERRa.json +58 -0
  46. results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/model_meta.json +1 -0
  47. results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/CEDRClassification.json +34 -34
  48. results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/GeoreviewClassification.json +73 -0
  49. results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/GeoreviewClusteringP2P.json +34 -0
  50. results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/HeadlineClassification.json +73 -0
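Each result file listed above shares the same layout: run metadata (dataset_revision, mteb_version, evaluation_time, kg_co2_emissions) and a scores object keyed by split (test, dev, or validation), whose entries hold hf_subset, languages, main_score, and task-specific metrics. A short sketch of reading one of them, assuming a local checkout of this results repository (the chosen file is just an example):

```python
import json
from pathlib import Path

# Any of the files added in this commit; path assumes the repo root as CWD.
path = Path(
    "results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/"
    "07d27e5226328010336563bc1b564a5e3436a298/CEDRClassification.json"
)

result = json.loads(path.read_text(encoding="utf-8"))

# task_name and mteb_version identify what was run and with which library version.
print(result["task_name"], result["mteb_version"])

# Each split holds one entry per hf_subset; main_score is the headline metric.
for split, entries in result["scores"].items():
    for entry in entries:
        print(f"  {split} [{entry['hf_subset']}]: {entry['main_score']:.4f}")
```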
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/CEDRClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "c0ba03d058e3e1b2f3fd20518875a4563dd12db4",
3
+ "evaluation_time": 49.17470383644104,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.5477151965993624,
10
+ "f1": 0.6036500719599494,
11
+ "hf_subset": "default",
12
+ "languages": [
13
+ "rus-Cyrl"
14
+ ],
15
+ "lrap": 0.8649468650371996,
16
+ "main_score": 0.5477151965993624,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.5451647183846972,
20
+ "f1": 0.5850362532892082,
21
+ "lrap": 0.8438363443145652
22
+ },
23
+ {
24
+ "accuracy": 0.628586609989373,
25
+ "f1": 0.6422497078024308,
26
+ "lrap": 0.8565887353878912
27
+ },
28
+ {
29
+ "accuracy": 0.5371944739638682,
30
+ "f1": 0.602381004950392,
31
+ "lrap": 0.8797555791710995
32
+ },
33
+ {
34
+ "accuracy": 0.5143464399574921,
35
+ "f1": 0.5742093596923625,
36
+ "lrap": 0.8505844845908667
37
+ },
38
+ {
39
+ "accuracy": 0.5446333687566419,
40
+ "f1": 0.6001832009513983,
41
+ "lrap": 0.8803400637619601
42
+ },
43
+ {
44
+ "accuracy": 0.5712008501594049,
45
+ "f1": 0.6201225698955309,
46
+ "lrap": 0.8817215727949038
47
+ },
48
+ {
49
+ "accuracy": 0.502125398512221,
50
+ "f1": 0.5781829471349942,
51
+ "lrap": 0.8520191285866153
52
+ },
53
+ {
54
+ "accuracy": 0.5371944739638682,
55
+ "f1": 0.6032409719825884,
56
+ "lrap": 0.8667375132837453
57
+ },
58
+ {
59
+ "accuracy": 0.5749202975557917,
60
+ "f1": 0.622622514090637,
61
+ "lrap": 0.8739107332624917
62
+ },
63
+ {
64
+ "accuracy": 0.5217853347502657,
65
+ "f1": 0.6082721898099509,
66
+ "lrap": 0.8639744952178583
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "CEDRClassification"
73
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/GeoreviewClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "3765c0d1de6b7d264bc459433c45e5a75513839c",
3
+ "evaluation_time": 70.92497396469116,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.593115234375,
10
+ "f1": 0.5654370923929647,
11
+ "f1_weighted": 0.565366043274544,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.593115234375,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.57275390625,
20
+ "f1": 0.5311404233477083,
21
+ "f1_weighted": 0.5310401242904998
22
+ },
23
+ {
24
+ "accuracy": 0.5693359375,
25
+ "f1": 0.5233095126267937,
26
+ "f1_weighted": 0.5231911107096815
27
+ },
28
+ {
29
+ "accuracy": 0.59619140625,
30
+ "f1": 0.5690305785888471,
31
+ "f1_weighted": 0.5689581598276597
32
+ },
33
+ {
34
+ "accuracy": 0.60595703125,
35
+ "f1": 0.5901413093480931,
36
+ "f1_weighted": 0.590105109669658
37
+ },
38
+ {
39
+ "accuracy": 0.6025390625,
40
+ "f1": 0.5729259523761623,
41
+ "f1_weighted": 0.5728395007156636
42
+ },
43
+ {
44
+ "accuracy": 0.5947265625,
45
+ "f1": 0.5692650108462612,
46
+ "f1_weighted": 0.569206137591517
47
+ },
48
+ {
49
+ "accuracy": 0.60693359375,
50
+ "f1": 0.5888821876084315,
51
+ "f1_weighted": 0.5888208744862033
52
+ },
53
+ {
54
+ "accuracy": 0.58935546875,
55
+ "f1": 0.55793654500316,
56
+ "f1_weighted": 0.5578614223389805
57
+ },
58
+ {
59
+ "accuracy": 0.60302734375,
60
+ "f1": 0.5833953830631733,
61
+ "f1_weighted": 0.5833356693161511
62
+ },
63
+ {
64
+ "accuracy": 0.59033203125,
65
+ "f1": 0.5683440211210167,
66
+ "f1_weighted": 0.5683023237994256
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "GeoreviewClassification"
73
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/GeoreviewClusteringP2P.json ADDED
@@ -0,0 +1,34 @@
+ {
+ "dataset_revision": "97a313c8fc85b47f13f33e7e9a95c1ad888c7fec",
+ "evaluation_time": 106.16331720352173,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.19.2",
+ "scores": {
+ "test": [
+ {
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.7490333015485914,
+ "v_measure": 0.7490333015485914,
+ "v_measure_std": 0.0034579399932174047,
+ "v_measures": {
+ "Level 0": [
+ 0.752744253546754,
+ 0.7440034439559667,
+ 0.7466405375270816,
+ 0.7504486865332559,
+ 0.74656634640751,
+ 0.750591472356923,
+ 0.7485759760938262,
+ 0.7532063350425257,
+ 0.7439558077953369,
+ 0.7536001562267337
+ ]
+ }
+ }
+ ]
+ },
+ "task_name": "GeoreviewClusteringP2P"
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/HeadlineClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "2fe05ee6b5832cda29f2ef7aaad7b7fe6a3609eb",
3
+ "evaluation_time": 41.733545541763306,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.823193359375,
10
+ "f1": 0.8239465084764503,
11
+ "f1_weighted": 0.8239283463460406,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.823193359375,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.80712890625,
20
+ "f1": 0.8089034546914284,
21
+ "f1_weighted": 0.8088769259452948
22
+ },
23
+ {
24
+ "accuracy": 0.8349609375,
25
+ "f1": 0.8357485899262026,
26
+ "f1_weighted": 0.8357459144685893
27
+ },
28
+ {
29
+ "accuracy": 0.81591796875,
30
+ "f1": 0.8175058255173461,
31
+ "f1_weighted": 0.8174854105287481
32
+ },
33
+ {
34
+ "accuracy": 0.81640625,
35
+ "f1": 0.8184209189049058,
36
+ "f1_weighted": 0.8184018752116653
37
+ },
38
+ {
39
+ "accuracy": 0.81494140625,
40
+ "f1": 0.816025755172741,
41
+ "f1_weighted": 0.8160000608774359
42
+ },
43
+ {
44
+ "accuracy": 0.8369140625,
45
+ "f1": 0.8376501552762083,
46
+ "f1_weighted": 0.8376405057751175
47
+ },
48
+ {
49
+ "accuracy": 0.82080078125,
50
+ "f1": 0.8200892484658041,
51
+ "f1_weighted": 0.8200637301606719
52
+ },
53
+ {
54
+ "accuracy": 0.814453125,
55
+ "f1": 0.8152234211475688,
56
+ "f1_weighted": 0.8151917129263095
57
+ },
58
+ {
59
+ "accuracy": 0.82958984375,
60
+ "f1": 0.8291598309049161,
61
+ "f1_weighted": 0.8291522960195152
62
+ },
63
+ {
64
+ "accuracy": 0.8408203125,
65
+ "f1": 0.840737884757382,
66
+ "f1_weighted": 0.84072503154706
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "HeadlineClassification"
73
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/InappropriatenessClassification.json ADDED
@@ -0,0 +1,95 @@
1
+ {
2
+ "dataset_revision": "601651fdc45ef243751676e62dd7a19f491c0285",
3
+ "evaluation_time": 37.14429306983948,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.732568359375,
10
+ "ap": 0.6710765411408649,
11
+ "ap_weighted": 0.6710765411408649,
12
+ "f1": 0.730367241384523,
13
+ "f1_weighted": 0.730367241384523,
14
+ "hf_subset": "default",
15
+ "languages": [
16
+ "rus-Cyrl"
17
+ ],
18
+ "main_score": 0.732568359375,
19
+ "scores_per_experiment": [
20
+ {
21
+ "accuracy": 0.73291015625,
22
+ "ap": 0.678244924916574,
23
+ "ap_weighted": 0.678244924916574,
24
+ "f1": 0.7319114485702299,
25
+ "f1_weighted": 0.7319114485702299
26
+ },
27
+ {
28
+ "accuracy": 0.7568359375,
29
+ "ap": 0.696101186748497,
30
+ "ap_weighted": 0.696101186748497,
31
+ "f1": 0.7567967401972707,
32
+ "f1_weighted": 0.7567967401972707
33
+ },
34
+ {
35
+ "accuracy": 0.75341796875,
36
+ "ap": 0.6911183505754163,
37
+ "ap_weighted": 0.6911183505754163,
38
+ "f1": 0.753417439641227,
39
+ "f1_weighted": 0.753417439641227
40
+ },
41
+ {
42
+ "accuracy": 0.705078125,
43
+ "ap": 0.6430150082236842,
44
+ "ap_weighted": 0.6430150082236842,
45
+ "f1": 0.7049655782998275,
46
+ "f1_weighted": 0.7049655782998275
47
+ },
48
+ {
49
+ "accuracy": 0.7255859375,
50
+ "ap": 0.672415796624714,
51
+ "ap_weighted": 0.672415796624714,
52
+ "f1": 0.7241059263570389,
53
+ "f1_weighted": 0.7241059263570389
54
+ },
55
+ {
56
+ "accuracy": 0.6845703125,
57
+ "ap": 0.6162109375,
58
+ "ap_weighted": 0.6162109375,
59
+ "f1": 0.6697390979613315,
60
+ "f1_weighted": 0.6697390979613315
61
+ },
62
+ {
63
+ "accuracy": 0.77001953125,
64
+ "ap": 0.7068677063282002,
65
+ "ap_weighted": 0.7068677063282002,
66
+ "f1": 0.7700071934744195,
67
+ "f1_weighted": 0.7700071934744195
68
+ },
69
+ {
70
+ "accuracy": 0.75,
71
+ "ap": 0.675314465408805,
72
+ "ap_weighted": 0.675314465408805,
73
+ "f1": 0.7462795199380565,
74
+ "f1_weighted": 0.7462795199380565
75
+ },
76
+ {
77
+ "accuracy": 0.728515625,
78
+ "ap": 0.6725068933823529,
79
+ "ap_weighted": 0.6725068933823529,
80
+ "f1": 0.7277863998385874,
81
+ "f1_weighted": 0.7277863998385874
82
+ },
83
+ {
84
+ "accuracy": 0.71875,
85
+ "ap": 0.6589701417004049,
86
+ "ap_weighted": 0.6589701417004049,
87
+ "f1": 0.7186630695672415,
88
+ "f1_weighted": 0.7186630695672415
89
+ }
90
+ ]
91
+ }
92
+ ]
93
+ },
94
+ "task_name": "InappropriatenessClassification"
95
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/KinopoiskClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "5911f26666ac11af46cb9c6849d0dc80a378af24",
3
+ "evaluation_time": 84.22461795806885,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.6716666666666666,
10
+ "f1": 0.6476545663988791,
11
+ "f1_weighted": 0.6476545663988791,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.6716666666666666,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.6746666666666666,
20
+ "f1": 0.64998884298854,
21
+ "f1_weighted": 0.64998884298854
22
+ },
23
+ {
24
+ "accuracy": 0.6273333333333333,
25
+ "f1": 0.5395116792472158,
26
+ "f1_weighted": 0.5395116792472157
27
+ },
28
+ {
29
+ "accuracy": 0.6933333333333334,
30
+ "f1": 0.6851147871880379,
31
+ "f1_weighted": 0.6851147871880379
32
+ },
33
+ {
34
+ "accuracy": 0.6746666666666666,
35
+ "f1": 0.6537567797235678,
36
+ "f1_weighted": 0.6537567797235679
37
+ },
38
+ {
39
+ "accuracy": 0.664,
40
+ "f1": 0.6461389383302979,
41
+ "f1_weighted": 0.6461389383302979
42
+ },
43
+ {
44
+ "accuracy": 0.6786666666666666,
45
+ "f1": 0.6629463945304079,
46
+ "f1_weighted": 0.6629463945304079
47
+ },
48
+ {
49
+ "accuracy": 0.6693333333333333,
50
+ "f1": 0.6561239246851377,
51
+ "f1_weighted": 0.6561239246851377
52
+ },
53
+ {
54
+ "accuracy": 0.6746666666666666,
55
+ "f1": 0.664298738939659,
56
+ "f1_weighted": 0.664298738939659
57
+ },
58
+ {
59
+ "accuracy": 0.678,
60
+ "f1": 0.6493604358600386,
61
+ "f1_weighted": 0.6493604358600387
62
+ },
63
+ {
64
+ "accuracy": 0.682,
65
+ "f1": 0.6693051424958885,
66
+ "f1_weighted": 0.6693051424958885
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "KinopoiskClassification"
73
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/MIRACLReranking.json ADDED
@@ -0,0 +1,130 @@
1
+ {
2
+ "dataset_revision": "6d1962c527217f8927fca80f890f14f36b2802af",
3
+ "evaluation_time": 3058.513674020767,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "dev": [
8
+ {
9
+ "MAP@1(MIRACL)": 0.30839,
10
+ "MAP@10(MIRACL)": 0.47796,
11
+ "MAP@100(MIRACL)": 0.49908,
12
+ "MAP@1000(MIRACL)": 0.49908,
13
+ "MAP@20(MIRACL)": 0.49089,
14
+ "MAP@3(MIRACL)": 0.41929,
15
+ "MAP@5(MIRACL)": 0.44976,
16
+ "NDCG@1(MIRACL)": 0.50682,
17
+ "NDCG@10(MIRACL)": 0.55338,
18
+ "NDCG@100(MIRACL)": 0.60257,
19
+ "NDCG@1000(MIRACL)": 0.60257,
20
+ "NDCG@20(MIRACL)": 0.57749,
21
+ "NDCG@3(MIRACL)": 0.50141,
22
+ "NDCG@5(MIRACL)": 0.51849,
23
+ "P@1(MIRACL)": 0.50682,
24
+ "P@10(MIRACL)": 0.13978,
25
+ "P@100(MIRACL)": 0.01953,
26
+ "P@1000(MIRACL)": 0.00195,
27
+ "P@20(MIRACL)": 0.0816,
28
+ "P@3(MIRACL)": 0.30446,
29
+ "P@5(MIRACL)": 0.22326,
30
+ "Recall@1(MIRACL)": 0.30839,
31
+ "Recall@10(MIRACL)": 0.64404,
32
+ "Recall@100(MIRACL)": 0.79952,
33
+ "Recall@1000(MIRACL)": 0.79952,
34
+ "Recall@20(MIRACL)": 0.70976,
35
+ "Recall@3(MIRACL)": 0.47999,
36
+ "Recall@5(MIRACL)": 0.55208,
37
+ "hf_subset": "ru",
38
+ "languages": [
39
+ "rus-Cyrl"
40
+ ],
41
+ "main_score": 0.55338,
42
+ "nAUC_MAP@1000_diff1(MIRACL)": 0.2089962442210562,
43
+ "nAUC_MAP@1000_max(MIRACL)": 0.2919003259842182,
44
+ "nAUC_MAP@1000_std(MIRACL)": 0.11313653899226035,
45
+ "nAUC_MAP@100_diff1(MIRACL)": 0.2089962442210562,
46
+ "nAUC_MAP@100_max(MIRACL)": 0.2919003259842182,
47
+ "nAUC_MAP@100_std(MIRACL)": 0.11313653899226035,
48
+ "nAUC_MAP@10_diff1(MIRACL)": 0.222888454995082,
49
+ "nAUC_MAP@10_max(MIRACL)": 0.26810830243952344,
50
+ "nAUC_MAP@10_std(MIRACL)": 0.10456851487569355,
51
+ "nAUC_MAP@1_diff1(MIRACL)": 0.329830297887621,
52
+ "nAUC_MAP@1_max(MIRACL)": 0.17126033256438405,
53
+ "nAUC_MAP@1_std(MIRACL)": 0.011921962991838076,
54
+ "nAUC_MAP@20_diff1(MIRACL)": 0.21525905527375686,
55
+ "nAUC_MAP@20_max(MIRACL)": 0.28305684187493246,
56
+ "nAUC_MAP@20_std(MIRACL)": 0.11348945318590824,
57
+ "nAUC_MAP@3_diff1(MIRACL)": 0.24982718126885262,
58
+ "nAUC_MAP@3_max(MIRACL)": 0.2222827611042665,
59
+ "nAUC_MAP@3_std(MIRACL)": 0.0686188171932736,
60
+ "nAUC_MAP@5_diff1(MIRACL)": 0.23695295846406364,
61
+ "nAUC_MAP@5_max(MIRACL)": 0.24665776208873202,
62
+ "nAUC_MAP@5_std(MIRACL)": 0.08700740306625203,
63
+ "nAUC_NDCG@1000_diff1(MIRACL)": 0.1404888787431995,
64
+ "nAUC_NDCG@1000_max(MIRACL)": 0.3670312011441587,
65
+ "nAUC_NDCG@1000_std(MIRACL)": 0.14472410998897423,
66
+ "nAUC_NDCG@100_diff1(MIRACL)": 0.1404888787431995,
67
+ "nAUC_NDCG@100_max(MIRACL)": 0.3670312011441587,
68
+ "nAUC_NDCG@100_std(MIRACL)": 0.14472410998897423,
69
+ "nAUC_NDCG@10_diff1(MIRACL)": 0.18315726445656572,
70
+ "nAUC_NDCG@10_max(MIRACL)": 0.31366687889861145,
71
+ "nAUC_NDCG@10_std(MIRACL)": 0.13224385174174086,
72
+ "nAUC_NDCG@1_diff1(MIRACL)": 0.26296207012200273,
73
+ "nAUC_NDCG@1_max(MIRACL)": 0.36530226031318025,
74
+ "nAUC_NDCG@1_std(MIRACL)": 0.1093290487120926,
75
+ "nAUC_NDCG@20_diff1(MIRACL)": 0.16408309785943495,
76
+ "nAUC_NDCG@20_max(MIRACL)": 0.34197426643090567,
77
+ "nAUC_NDCG@20_std(MIRACL)": 0.14523474760359054,
78
+ "nAUC_NDCG@3_diff1(MIRACL)": 0.20704435696438578,
79
+ "nAUC_NDCG@3_max(MIRACL)": 0.2932234688017299,
80
+ "nAUC_NDCG@3_std(MIRACL)": 0.10967851247525288,
81
+ "nAUC_NDCG@5_diff1(MIRACL)": 0.20267432251237152,
82
+ "nAUC_NDCG@5_max(MIRACL)": 0.30015217521787424,
83
+ "nAUC_NDCG@5_std(MIRACL)": 0.11955390856968096,
84
+ "nAUC_P@1000_diff1(MIRACL)": -0.18581808095830424,
85
+ "nAUC_P@1000_max(MIRACL)": 0.28768119683086585,
86
+ "nAUC_P@1000_std(MIRACL)": 0.11453610966247148,
87
+ "nAUC_P@100_diff1(MIRACL)": -0.18581808095830377,
88
+ "nAUC_P@100_max(MIRACL)": 0.28768119683086624,
89
+ "nAUC_P@100_std(MIRACL)": 0.1145361096624716,
90
+ "nAUC_P@10_diff1(MIRACL)": -0.11490679091021795,
91
+ "nAUC_P@10_max(MIRACL)": 0.303732953660836,
92
+ "nAUC_P@10_std(MIRACL)": 0.1815144896650242,
93
+ "nAUC_P@1_diff1(MIRACL)": 0.26296207012200273,
94
+ "nAUC_P@1_max(MIRACL)": 0.36530226031318025,
95
+ "nAUC_P@1_std(MIRACL)": 0.1093290487120926,
96
+ "nAUC_P@20_diff1(MIRACL)": -0.1469460760354143,
97
+ "nAUC_P@20_max(MIRACL)": 0.3073193969322568,
98
+ "nAUC_P@20_std(MIRACL)": 0.16885272580854835,
99
+ "nAUC_P@3_diff1(MIRACL)": -0.012682613687038172,
100
+ "nAUC_P@3_max(MIRACL)": 0.3248833378603578,
101
+ "nAUC_P@3_std(MIRACL)": 0.1753789292191723,
102
+ "nAUC_P@5_diff1(MIRACL)": -0.06119474522408995,
103
+ "nAUC_P@5_max(MIRACL)": 0.3250797516073804,
104
+ "nAUC_P@5_std(MIRACL)": 0.18667527856903227,
105
+ "nAUC_Recall@1000_diff1(MIRACL)": -0.1336575788083657,
106
+ "nAUC_Recall@1000_max(MIRACL)": 0.5176985694789664,
107
+ "nAUC_Recall@1000_std(MIRACL)": 0.22392826741997982,
108
+ "nAUC_Recall@100_diff1(MIRACL)": -0.1336575788083657,
109
+ "nAUC_Recall@100_max(MIRACL)": 0.5176985694789664,
110
+ "nAUC_Recall@100_std(MIRACL)": 0.22392826741997982,
111
+ "nAUC_Recall@10_diff1(MIRACL)": 0.0926463193056263,
112
+ "nAUC_Recall@10_max(MIRACL)": 0.28291780552434215,
113
+ "nAUC_Recall@10_std(MIRACL)": 0.1497300332187107,
114
+ "nAUC_Recall@1_diff1(MIRACL)": 0.329830297887621,
115
+ "nAUC_Recall@1_max(MIRACL)": 0.17126033256438405,
116
+ "nAUC_Recall@1_std(MIRACL)": 0.011921962991838076,
117
+ "nAUC_Recall@20_diff1(MIRACL)": 0.02226157355604967,
118
+ "nAUC_Recall@20_max(MIRACL)": 0.36952005700517465,
119
+ "nAUC_Recall@20_std(MIRACL)": 0.1946681259929083,
120
+ "nAUC_Recall@3_diff1(MIRACL)": 0.19357536696091276,
121
+ "nAUC_Recall@3_max(MIRACL)": 0.19784137636238455,
122
+ "nAUC_Recall@3_std(MIRACL)": 0.08700031115205505,
123
+ "nAUC_Recall@5_diff1(MIRACL)": 0.1508838319445154,
124
+ "nAUC_Recall@5_max(MIRACL)": 0.23837577186242764,
125
+ "nAUC_Recall@5_std(MIRACL)": 0.1224704068815029
126
+ }
127
+ ]
128
+ },
129
+ "task_name": "MIRACLReranking"
130
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/MassiveIntentClassification.json ADDED
@@ -0,0 +1,137 @@
1
+ {
2
+ "dataset_revision": "4672e20407010da34463acc759c162ca9734bca6",
3
+ "evaluation_time": 238.89698457717896,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.7314727639542704,
10
+ "f1": 0.7029509613044315,
11
+ "f1_weighted": 0.7256631827900565,
12
+ "hf_subset": "ru",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.7314727639542704,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.7498318762609281,
20
+ "f1": 0.716140430710699,
21
+ "f1_weighted": 0.7451098688894293
22
+ },
23
+ {
24
+ "accuracy": 0.7397444519166106,
25
+ "f1": 0.7096092013926623,
26
+ "f1_weighted": 0.7366102563637507
27
+ },
28
+ {
29
+ "accuracy": 0.7114996637525218,
30
+ "f1": 0.6915641707074874,
31
+ "f1_weighted": 0.7116819647995989
32
+ },
33
+ {
34
+ "accuracy": 0.7528581035642232,
35
+ "f1": 0.7102457063724665,
36
+ "f1_weighted": 0.742370987289208
37
+ },
38
+ {
39
+ "accuracy": 0.7431069266980498,
40
+ "f1": 0.7092041013363598,
41
+ "f1_weighted": 0.7358669124480911
42
+ },
43
+ {
44
+ "accuracy": 0.7078009414929388,
45
+ "f1": 0.6840768024865365,
46
+ "f1_weighted": 0.7085615317675305
47
+ },
48
+ {
49
+ "accuracy": 0.726630800268998,
50
+ "f1": 0.6986013055501684,
51
+ "f1_weighted": 0.7156717667858077
52
+ },
53
+ {
54
+ "accuracy": 0.7246133154001345,
55
+ "f1": 0.6899712907863645,
56
+ "f1_weighted": 0.7155703887651953
57
+ },
58
+ {
59
+ "accuracy": 0.7135171486213854,
60
+ "f1": 0.7004742109283838,
61
+ "f1_weighted": 0.7065636857122223
62
+ },
63
+ {
64
+ "accuracy": 0.7451244115669132,
65
+ "f1": 0.7196223927731854,
66
+ "f1_weighted": 0.7386244650797311
67
+ }
68
+ ]
69
+ }
70
+ ],
71
+ "validation": [
72
+ {
73
+ "accuracy": 0.7371864240039351,
74
+ "f1": 0.6919872164606259,
75
+ "f1_weighted": 0.728897197686264,
76
+ "hf_subset": "ru",
77
+ "languages": [
78
+ "rus-Cyrl"
79
+ ],
80
+ "main_score": 0.7371864240039351,
81
+ "scores_per_experiment": [
82
+ {
83
+ "accuracy": 0.7481554353172651,
84
+ "f1": 0.697706708330012,
85
+ "f1_weighted": 0.741921738803396
86
+ },
87
+ {
88
+ "accuracy": 0.7402852926709297,
89
+ "f1": 0.6929606464768286,
90
+ "f1_weighted": 0.7358916138155063
91
+ },
92
+ {
93
+ "accuracy": 0.735858337432366,
94
+ "f1": 0.6939703259575215,
95
+ "f1_weighted": 0.7307368868035592
96
+ },
97
+ {
98
+ "accuracy": 0.7515986227250369,
99
+ "f1": 0.6975390996102395,
100
+ "f1_weighted": 0.7401302137124002
101
+ },
102
+ {
103
+ "accuracy": 0.7476635514018691,
104
+ "f1": 0.6975070690946991,
105
+ "f1_weighted": 0.736950671981587
106
+ },
107
+ {
108
+ "accuracy": 0.7274963108706345,
109
+ "f1": 0.6854731694412295,
110
+ "f1_weighted": 0.7247630530178875
111
+ },
112
+ {
113
+ "accuracy": 0.7324151500245942,
114
+ "f1": 0.6832147716839522,
115
+ "f1_weighted": 0.7196689872914709
116
+ },
117
+ {
118
+ "accuracy": 0.7284800787014265,
119
+ "f1": 0.6837681985370787,
120
+ "f1_weighted": 0.7181839293197748
121
+ },
122
+ {
123
+ "accuracy": 0.7147073290703394,
124
+ "f1": 0.6814897084521937,
125
+ "f1_weighted": 0.7028530347559647
126
+ },
127
+ {
128
+ "accuracy": 0.7452041318248893,
129
+ "f1": 0.7062424670225043,
130
+ "f1_weighted": 0.7378718473610946
131
+ }
132
+ ]
133
+ }
134
+ ]
135
+ },
136
+ "task_name": "MassiveIntentClassification"
137
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/MassiveScenarioClassification.json ADDED
@@ -0,0 +1,137 @@
1
+ {
2
+ "dataset_revision": "fad2c6e8459f9e1c45d9315f4953d921437d70f8",
3
+ "evaluation_time": 133.87348747253418,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.7545729657027572,
10
+ "f1": 0.7490898691066853,
11
+ "f1_weighted": 0.7515855273153575,
12
+ "hf_subset": "ru",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.7545729657027572,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.7511768661735037,
20
+ "f1": 0.7456108309189888,
21
+ "f1_weighted": 0.7484723005511253
22
+ },
23
+ {
24
+ "accuracy": 0.7505043712172159,
25
+ "f1": 0.7457814471713863,
26
+ "f1_weighted": 0.7462111693089635
27
+ },
28
+ {
29
+ "accuracy": 0.7636180228648285,
30
+ "f1": 0.754175232640998,
31
+ "f1_weighted": 0.7607174268135466
32
+ },
33
+ {
34
+ "accuracy": 0.7575655682582381,
35
+ "f1": 0.7491067946517629,
36
+ "f1_weighted": 0.7551134220589979
37
+ },
38
+ {
39
+ "accuracy": 0.7363819771351715,
40
+ "f1": 0.7311910589063159,
41
+ "f1_weighted": 0.7306166372787689
42
+ },
43
+ {
44
+ "accuracy": 0.7279757901815737,
45
+ "f1": 0.7275025983852841,
46
+ "f1_weighted": 0.7248716100340834
47
+ },
48
+ {
49
+ "accuracy": 0.7552118359112306,
50
+ "f1": 0.7432017673921737,
51
+ "f1_weighted": 0.7523270597889727
52
+ },
53
+ {
54
+ "accuracy": 0.7457969065232011,
55
+ "f1": 0.7452713354966792,
56
+ "f1_weighted": 0.7453588019062662
57
+ },
58
+ {
59
+ "accuracy": 0.7824478816408877,
60
+ "f1": 0.7765375469569651,
61
+ "f1_weighted": 0.7774774088361666
62
+ },
63
+ {
64
+ "accuracy": 0.7750504371217216,
65
+ "f1": 0.772520078546299,
66
+ "f1_weighted": 0.7746894365766843
67
+ }
68
+ ]
69
+ }
70
+ ],
71
+ "validation": [
72
+ {
73
+ "accuracy": 0.7585833743236596,
74
+ "f1": 0.750976458122237,
75
+ "f1_weighted": 0.7552329606312337,
76
+ "hf_subset": "ru",
77
+ "languages": [
78
+ "rus-Cyrl"
79
+ ],
80
+ "main_score": 0.7585833743236596,
81
+ "scores_per_experiment": [
82
+ {
83
+ "accuracy": 0.7589768814559764,
84
+ "f1": 0.752446272325823,
85
+ "f1_weighted": 0.7567932005533703
86
+ },
87
+ {
88
+ "accuracy": 0.7461878996556812,
89
+ "f1": 0.7413218044474701,
90
+ "f1_weighted": 0.7434603528500205
91
+ },
92
+ {
93
+ "accuracy": 0.7722577471716675,
94
+ "f1": 0.761345597506989,
95
+ "f1_weighted": 0.7688634050629207
96
+ },
97
+ {
98
+ "accuracy": 0.7530742744712248,
99
+ "f1": 0.7441008750798291,
100
+ "f1_weighted": 0.7482848214919452
101
+ },
102
+ {
103
+ "accuracy": 0.7456960157402853,
104
+ "f1": 0.7377843033368218,
105
+ "f1_weighted": 0.7396580823759403
106
+ },
107
+ {
108
+ "accuracy": 0.7309394982784063,
109
+ "f1": 0.7263739502710909,
110
+ "f1_weighted": 0.7269369244142984
111
+ },
112
+ {
113
+ "accuracy": 0.7624200688637481,
114
+ "f1": 0.7487759384438273,
115
+ "f1_weighted": 0.7604143042265185
116
+ },
117
+ {
118
+ "accuracy": 0.735858337432366,
119
+ "f1": 0.7303500540265936,
120
+ "f1_weighted": 0.734421778539582
121
+ },
122
+ {
123
+ "accuracy": 0.7968519429414658,
124
+ "f1": 0.7875421066196677,
125
+ "f1_weighted": 0.7908719752208149
126
+ },
127
+ {
128
+ "accuracy": 0.7835710772257747,
129
+ "f1": 0.7797236791642576,
130
+ "f1_weighted": 0.7826247615769252
131
+ }
132
+ ]
133
+ }
134
+ ]
135
+ },
136
+ "task_name": "MassiveScenarioClassification"
137
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/RUParaPhraserSTS.json ADDED
@@ -0,0 +1,26 @@
+ {
+ "dataset_revision": "43265056790b8f7c59e0139acb4be0a8dad2c8f4",
+ "evaluation_time": 52.95315933227539,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.19.2",
+ "scores": {
+ "test": [
+ {
+ "cosine_pearson": 0.6994501757022691,
+ "cosine_spearman": 0.7364909320959803,
+ "euclidean_pearson": 0.718112255879904,
+ "euclidean_spearman": 0.7364909320959803,
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.7364909320959803,
+ "manhattan_pearson": 0.7181879721477015,
+ "manhattan_spearman": 0.7363332544480308,
+ "pearson": 0.6994501757022691,
+ "spearman": 0.7364909320959803
+ }
+ ]
+ },
+ "task_name": "RUParaPhraserSTS"
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/RiaNewsRetrieval.json ADDED
@@ -0,0 +1,158 @@
1
+ {
2
+ "dataset_revision": "82374b0bbacda6114f39ff9c5b925fa1512ca5d7",
3
+ "evaluation_time": 28827.79638028145,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.4",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "rus-Cyrl"
12
+ ],
13
+ "main_score": 0.61105,
14
+ "map_at_1": 0.4719,
15
+ "map_at_10": 0.56564,
16
+ "map_at_100": 0.57159,
17
+ "map_at_1000": 0.57187,
18
+ "map_at_20": 0.56936,
19
+ "map_at_3": 0.54457,
20
+ "map_at_5": 0.55712,
21
+ "mrr_at_1": 0.4719,
22
+ "mrr_at_10": 0.5656386507936508,
23
+ "mrr_at_100": 0.5715907421190816,
24
+ "mrr_at_1000": 0.5718664157490458,
25
+ "mrr_at_20": 0.5693608111784156,
26
+ "mrr_at_3": 0.5445666666666666,
27
+ "mrr_at_5": 0.5571166666666667,
28
+ "nauc_map_at_1000_diff1": 0.6287499640238895,
29
+ "nauc_map_at_1000_max": 0.2034016409284853,
30
+ "nauc_map_at_1000_std": -0.024213719396550072,
31
+ "nauc_map_at_100_diff1": 0.6286832444938639,
32
+ "nauc_map_at_100_max": 0.20339278798339705,
33
+ "nauc_map_at_100_std": -0.024115897091813165,
34
+ "nauc_map_at_10_diff1": 0.6284471188698416,
35
+ "nauc_map_at_10_max": 0.20336793851550597,
36
+ "nauc_map_at_10_std": -0.026007494086294988,
37
+ "nauc_map_at_1_diff1": 0.6686374004258928,
38
+ "nauc_map_at_1_max": 0.18676159333349435,
39
+ "nauc_map_at_1_std": -0.05212337458336629,
40
+ "nauc_map_at_20_diff1": 0.6284087008703055,
41
+ "nauc_map_at_20_max": 0.20354268936083575,
42
+ "nauc_map_at_20_std": -0.024912987275978588,
43
+ "nauc_map_at_3_diff1": 0.6307094590356809,
44
+ "nauc_map_at_3_max": 0.2010374239943087,
45
+ "nauc_map_at_3_std": -0.03385505506633223,
46
+ "nauc_map_at_5_diff1": 0.6279907550563606,
47
+ "nauc_map_at_5_max": 0.20196529282308165,
48
+ "nauc_map_at_5_std": -0.029191933770232412,
49
+ "nauc_mrr_at_1000_diff1": 0.6287493903360661,
50
+ "nauc_mrr_at_1000_max": 0.20340088178138574,
51
+ "nauc_mrr_at_1000_std": -0.024214496755751477,
52
+ "nauc_mrr_at_100_diff1": 0.6286826712446165,
53
+ "nauc_mrr_at_100_max": 0.2033920294768933,
54
+ "nauc_mrr_at_100_std": -0.024116673713993626,
55
+ "nauc_mrr_at_10_diff1": 0.6284471188698416,
56
+ "nauc_mrr_at_10_max": 0.20336793851550597,
57
+ "nauc_mrr_at_10_std": -0.026007494086294988,
58
+ "nauc_mrr_at_1_diff1": 0.6686374004258928,
59
+ "nauc_mrr_at_1_max": 0.18676159333349435,
60
+ "nauc_mrr_at_1_std": -0.05212337458336629,
61
+ "nauc_mrr_at_20_diff1": 0.6284081309623656,
62
+ "nauc_mrr_at_20_max": 0.20354193566219686,
63
+ "nauc_mrr_at_20_std": -0.02491375969185557,
64
+ "nauc_mrr_at_3_diff1": 0.6307094590356809,
65
+ "nauc_mrr_at_3_max": 0.2010374239943087,
66
+ "nauc_mrr_at_3_std": -0.03385505506633223,
67
+ "nauc_mrr_at_5_diff1": 0.6279907550563606,
68
+ "nauc_mrr_at_5_max": 0.20196529282308165,
69
+ "nauc_mrr_at_5_std": -0.029191933770232412,
70
+ "nauc_ndcg_at_1000_diff1": 0.6165892576941506,
71
+ "nauc_ndcg_at_1000_max": 0.21333892985031438,
72
+ "nauc_ndcg_at_1000_std": -0.0005843485217573434,
73
+ "nauc_ndcg_at_100_diff1": 0.6142205015081649,
74
+ "nauc_ndcg_at_100_max": 0.21314468048070861,
75
+ "nauc_ndcg_at_100_std": 0.004184575062404777,
76
+ "nauc_ndcg_at_10_diff1": 0.612957070773732,
77
+ "nauc_ndcg_at_10_max": 0.21140675903403125,
78
+ "nauc_ndcg_at_10_std": -0.008762805121120773,
79
+ "nauc_ndcg_at_1_diff1": 0.6686374004258928,
80
+ "nauc_ndcg_at_1_max": 0.18676159333349435,
81
+ "nauc_ndcg_at_1_std": -0.05212337458336629,
82
+ "nauc_ndcg_at_20_diff1": 0.6123757854804859,
83
+ "nauc_ndcg_at_20_max": 0.21268533489575686,
84
+ "nauc_ndcg_at_20_std": -0.0035666036311125435,
85
+ "nauc_ndcg_at_3_diff1": 0.6180249982578259,
86
+ "nauc_ndcg_at_3_max": 0.2057248816308168,
87
+ "nauc_ndcg_at_3_std": -0.027131282756732888,
88
+ "nauc_ndcg_at_5_diff1": 0.6123891288274917,
89
+ "nauc_ndcg_at_5_max": 0.20774580758763359,
90
+ "nauc_ndcg_at_5_std": -0.01772572627091591,
91
+ "nauc_precision_at_1000_diff1": 0.46975805247471825,
92
+ "nauc_precision_at_1000_max": 0.44040103603630476,
93
+ "nauc_precision_at_1000_std": 0.46633678346960505,
94
+ "nauc_precision_at_100_diff1": 0.5026853368120134,
95
+ "nauc_precision_at_100_max": 0.3100523189652789,
96
+ "nauc_precision_at_100_std": 0.30930509103777437,
97
+ "nauc_precision_at_10_diff1": 0.548554527934185,
98
+ "nauc_precision_at_10_max": 0.24671333595263523,
99
+ "nauc_precision_at_10_std": 0.07008771278395116,
100
+ "nauc_precision_at_1_diff1": 0.6686374004258928,
101
+ "nauc_precision_at_1_max": 0.18676159333349435,
102
+ "nauc_precision_at_1_std": -0.05212337458336629,
103
+ "nauc_precision_at_20_diff1": 0.531505354951576,
104
+ "nauc_precision_at_20_max": 0.26312956602606635,
105
+ "nauc_precision_at_20_std": 0.11977453322496394,
106
+ "nauc_precision_at_3_diff1": 0.5773867475454311,
107
+ "nauc_precision_at_3_max": 0.22071139718699107,
108
+ "nauc_precision_at_3_std": -0.005370102746454722,
109
+ "nauc_precision_at_5_diff1": 0.556826582018447,
110
+ "nauc_precision_at_5_max": 0.2283534205639564,
111
+ "nauc_precision_at_5_std": 0.024599254847439068,
112
+ "nauc_recall_at_1000_diff1": 0.46975805247472324,
113
+ "nauc_recall_at_1000_max": 0.4404010360363066,
114
+ "nauc_recall_at_1000_std": 0.46633678346960866,
115
+ "nauc_recall_at_100_diff1": 0.5026853368120127,
116
+ "nauc_recall_at_100_max": 0.31005231896527796,
117
+ "nauc_recall_at_100_std": 0.3093050910377749,
118
+ "nauc_recall_at_10_diff1": 0.548554527934184,
119
+ "nauc_recall_at_10_max": 0.24671333595263586,
120
+ "nauc_recall_at_10_std": 0.07008771278395223,
121
+ "nauc_recall_at_1_diff1": 0.6686374004258928,
122
+ "nauc_recall_at_1_max": 0.18676159333349435,
123
+ "nauc_recall_at_1_std": -0.05212337458336629,
124
+ "nauc_recall_at_20_diff1": 0.5315053549515758,
125
+ "nauc_recall_at_20_max": 0.2631295660260669,
126
+ "nauc_recall_at_20_std": 0.11977453322496431,
127
+ "nauc_recall_at_3_diff1": 0.5773867475454315,
128
+ "nauc_recall_at_3_max": 0.22071139718699118,
129
+ "nauc_recall_at_3_std": -0.0053701027464555,
130
+ "nauc_recall_at_5_diff1": 0.5568265820184471,
131
+ "nauc_recall_at_5_max": 0.22835342056395666,
132
+ "nauc_recall_at_5_std": 0.02459925484743956,
133
+ "ndcg_at_1": 0.4719,
134
+ "ndcg_at_10": 0.61105,
135
+ "ndcg_at_100": 0.64036,
136
+ "ndcg_at_1000": 0.6481,
137
+ "ndcg_at_20": 0.62447,
138
+ "ndcg_at_3": 0.56806,
139
+ "ndcg_at_5": 0.59064,
140
+ "precision_at_1": 0.4719,
141
+ "precision_at_10": 0.07533,
142
+ "precision_at_100": 0.00891,
143
+ "precision_at_1000": 0.00095,
144
+ "precision_at_20": 0.04031,
145
+ "precision_at_3": 0.212,
146
+ "precision_at_5": 0.13814,
147
+ "recall_at_1": 0.4719,
148
+ "recall_at_10": 0.7533,
149
+ "recall_at_100": 0.8912,
150
+ "recall_at_1000": 0.9531,
151
+ "recall_at_20": 0.8061,
152
+ "recall_at_3": 0.636,
153
+ "recall_at_5": 0.6907
154
+ }
155
+ ]
156
+ },
157
+ "task_name": "RiaNewsRetrieval"
158
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/RuBQReranking.json ADDED
@@ -0,0 +1,26 @@
+ {
+ "dataset_revision": "2e96b8f098fa4b0950fc58eacadeb31c0d0c7fa2",
+ "evaluation_time": 935.4835982322693,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.19.2",
+ "scores": {
+ "test": [
+ {
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.7028418717346855,
+ "map": 0.7028418717346855,
+ "mrr": 0.7563855372366011,
+ "nAUC_map_diff1": 0.3723907795868552,
+ "nAUC_map_max": 0.22814889043420492,
+ "nAUC_map_std": 0.15025167598805644,
+ "nAUC_mrr_diff1": 0.427561251537096,
+ "nAUC_mrr_max": 0.29252104240163374,
+ "nAUC_mrr_std": 0.16902210776192714
+ }
+ ]
+ },
+ "task_name": "RuBQReranking"
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/RuBQRetrieval.json ADDED
@@ -0,0 +1,158 @@
1
+ {
2
+ "dataset_revision": "e19b6ffa60b3bc248e0b41f4cc37c26a55c2a67b",
3
+ "evaluation_time": 1340.1966524124146,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "rus-Cyrl"
12
+ ],
13
+ "main_score": 0.65167,
14
+ "map_at_1": 0.39151,
15
+ "map_at_10": 0.57358,
16
+ "map_at_100": 0.5852,
17
+ "map_at_1000": 0.58555,
18
+ "map_at_20": 0.5812,
19
+ "map_at_3": 0.52439,
20
+ "map_at_5": 0.55277,
21
+ "mrr_at_1": 0.5585106382978723,
22
+ "mrr_at_10": 0.6664867818679876,
23
+ "mrr_at_100": 0.6709254578889642,
24
+ "mrr_at_1000": 0.6710152668820618,
25
+ "mrr_at_20": 0.6693232592644961,
26
+ "mrr_at_3": 0.6447990543735225,
27
+ "mrr_at_5": 0.658274231678487,
28
+ "nauc_map_at_1000_diff1": 0.36531591057326135,
29
+ "nauc_map_at_1000_max": 0.3035653661266291,
30
+ "nauc_map_at_1000_std": 0.005254597941377395,
31
+ "nauc_map_at_100_diff1": 0.3652588812366525,
32
+ "nauc_map_at_100_max": 0.30363406949043575,
33
+ "nauc_map_at_100_std": 0.00539823574349937,
34
+ "nauc_map_at_10_diff1": 0.3599205834203505,
35
+ "nauc_map_at_10_max": 0.3012657738696787,
36
+ "nauc_map_at_10_std": -4.3776833764746496e-05,
37
+ "nauc_map_at_1_diff1": 0.4068781443901671,
38
+ "nauc_map_at_1_max": 0.2069025852633963,
39
+ "nauc_map_at_1_std": -0.028159099303659164,
40
+ "nauc_map_at_20_diff1": 0.3632085940089257,
41
+ "nauc_map_at_20_max": 0.30369858488942186,
42
+ "nauc_map_at_20_std": 0.005490637898915787,
43
+ "nauc_map_at_3_diff1": 0.3674666430542543,
44
+ "nauc_map_at_3_max": 0.27341770449371994,
45
+ "nauc_map_at_3_std": -0.0299484654813272,
46
+ "nauc_map_at_5_diff1": 0.3639287998557502,
47
+ "nauc_map_at_5_max": 0.29046081589915973,
48
+ "nauc_map_at_5_std": -0.0150391154964749,
49
+ "nauc_mrr_at_1000_diff1": 0.43898998213997864,
50
+ "nauc_mrr_at_1000_max": 0.356688952241998,
51
+ "nauc_mrr_at_1000_std": -0.019103601325945414,
52
+ "nauc_mrr_at_100_diff1": 0.43899463746675116,
53
+ "nauc_mrr_at_100_max": 0.3567232095267896,
54
+ "nauc_mrr_at_100_std": -0.01908249410095766,
55
+ "nauc_mrr_at_10_diff1": 0.4380133613648583,
56
+ "nauc_mrr_at_10_max": 0.35963278324407727,
57
+ "nauc_mrr_at_10_std": -0.019943373571629102,
58
+ "nauc_mrr_at_1_diff1": 0.45744567048708495,
59
+ "nauc_mrr_at_1_max": 0.3185087628600214,
60
+ "nauc_mrr_at_1_std": -0.039586670000522815,
61
+ "nauc_mrr_at_20_diff1": 0.43846580304956645,
62
+ "nauc_mrr_at_20_max": 0.3575530067448174,
63
+ "nauc_mrr_at_20_std": -0.01769537099436092,
64
+ "nauc_mrr_at_3_diff1": 0.43257374543992816,
65
+ "nauc_mrr_at_3_max": 0.3526088977070129,
66
+ "nauc_mrr_at_3_std": -0.031830883555102794,
67
+ "nauc_mrr_at_5_diff1": 0.4370843589532293,
68
+ "nauc_mrr_at_5_max": 0.35847116461695416,
69
+ "nauc_mrr_at_5_std": -0.022925984021903743,
70
+ "nauc_ndcg_at_1000_diff1": 0.3846228579932152,
71
+ "nauc_ndcg_at_1000_max": 0.3385203759660593,
72
+ "nauc_ndcg_at_1000_std": 0.025326913976533756,
73
+ "nauc_ndcg_at_100_diff1": 0.382922798833745,
74
+ "nauc_ndcg_at_100_max": 0.34072439256810394,
75
+ "nauc_ndcg_at_100_std": 0.030850535620277262,
76
+ "nauc_ndcg_at_10_diff1": 0.3664216836031285,
77
+ "nauc_ndcg_at_10_max": 0.3408465736838746,
78
+ "nauc_ndcg_at_10_std": 0.02028073320293512,
79
+ "nauc_ndcg_at_1_diff1": 0.45744567048708495,
80
+ "nauc_ndcg_at_1_max": 0.3185087628600214,
81
+ "nauc_ndcg_at_1_std": -0.039586670000522815,
82
+ "nauc_ndcg_at_20_diff1": 0.374660140644635,
83
+ "nauc_ndcg_at_20_max": 0.3448991496619514,
84
+ "nauc_ndcg_at_20_std": 0.03777121024339731,
85
+ "nauc_ndcg_at_3_diff1": 0.3740162479451307,
86
+ "nauc_ndcg_at_3_max": 0.30444229908458836,
87
+ "nauc_ndcg_at_3_std": -0.027354767912133024,
88
+ "nauc_ndcg_at_5_diff1": 0.37148180990292456,
89
+ "nauc_ndcg_at_5_max": 0.3232666768938328,
90
+ "nauc_ndcg_at_5_std": -0.005629486117054672,
91
+ "nauc_precision_at_1000_diff1": -0.07692478248741134,
92
+ "nauc_precision_at_1000_max": 0.10546345110161712,
93
+ "nauc_precision_at_1000_std": 0.09072980199575012,
94
+ "nauc_precision_at_100_diff1": -0.05544005240602762,
95
+ "nauc_precision_at_100_max": 0.14687673785864958,
96
+ "nauc_precision_at_100_std": 0.12306702671036018,
97
+ "nauc_precision_at_10_diff1": 0.0069682779710068254,
98
+ "nauc_precision_at_10_max": 0.25108941153969105,
99
+ "nauc_precision_at_10_std": 0.11151018137974626,
100
+ "nauc_precision_at_1_diff1": 0.45744567048708495,
101
+ "nauc_precision_at_1_max": 0.3185087628600214,
102
+ "nauc_precision_at_1_std": -0.039586670000522815,
103
+ "nauc_precision_at_20_diff1": -0.014568797025451129,
104
+ "nauc_precision_at_20_max": 0.22056869274585322,
105
+ "nauc_precision_at_20_std": 0.1484714192270009,
106
+ "nauc_precision_at_3_diff1": 0.16018213606544024,
107
+ "nauc_precision_at_3_max": 0.30585149317022786,
108
+ "nauc_precision_at_3_std": 0.008720314924668173,
109
+ "nauc_precision_at_5_diff1": 0.08676098234046163,
110
+ "nauc_precision_at_5_max": 0.28789719223139537,
111
+ "nauc_precision_at_5_std": 0.059939597328937146,
112
+ "nauc_recall_at_1000_diff1": 0.4355537994702468,
113
+ "nauc_recall_at_1000_max": 0.6211736142405392,
114
+ "nauc_recall_at_1000_std": 0.6889712140530266,
115
+ "nauc_recall_at_100_diff1": 0.3040181700049794,
116
+ "nauc_recall_at_100_max": 0.4278044235579686,
117
+ "nauc_recall_at_100_std": 0.34883596833238506,
118
+ "nauc_recall_at_10_diff1": 0.2662561079056947,
119
+ "nauc_recall_at_10_max": 0.37801380089849934,
120
+ "nauc_recall_at_10_std": 0.11158230029055471,
121
+ "nauc_recall_at_1_diff1": 0.4068781443901671,
122
+ "nauc_recall_at_1_max": 0.2069025852633963,
123
+ "nauc_recall_at_1_std": -0.028159099303659164,
124
+ "nauc_recall_at_20_diff1": 0.2807360018857007,
125
+ "nauc_recall_at_20_max": 0.40740384224903375,
126
+ "nauc_recall_at_20_std": 0.23705892616586494,
127
+ "nauc_recall_at_3_diff1": 0.31433810757274283,
128
+ "nauc_recall_at_3_max": 0.28054691827784783,
129
+ "nauc_recall_at_3_std": -0.03140434205104806,
130
+ "nauc_recall_at_5_diff1": 0.30352635635604164,
131
+ "nauc_recall_at_5_max": 0.32593953695023975,
132
+ "nauc_recall_at_5_std": 0.02104189655512243,
133
+ "ndcg_at_1": 0.55851,
134
+ "ndcg_at_10": 0.65167,
135
+ "ndcg_at_100": 0.69101,
136
+ "ndcg_at_1000": 0.69751,
137
+ "ndcg_at_20": 0.67121,
138
+ "ndcg_at_3": 0.58256,
139
+ "ndcg_at_5": 0.61573,
140
+ "precision_at_1": 0.55851,
141
+ "precision_at_10": 0.12784,
142
+ "precision_at_100": 0.01566,
143
+ "precision_at_1000": 0.00165,
144
+ "precision_at_20": 0.07004,
145
+ "precision_at_3": 0.31678,
146
+ "precision_at_5": 0.22021,
147
+ "recall_at_1": 0.39151,
148
+ "recall_at_10": 0.78596,
149
+ "recall_at_100": 0.94161,
150
+ "recall_at_1000": 0.98523,
151
+ "recall_at_20": 0.85088,
152
+ "recall_at_3": 0.61443,
153
+ "recall_at_5": 0.69258
154
+ }
155
+ ]
156
+ },
157
+ "task_name": "RuBQRetrieval"
158
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/RuReviewsClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "f6d2c31f4dc6b88f468552750bfec05b4b41b05a",
3
+ "evaluation_time": 41.255961656570435,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.72890625,
10
+ "f1": 0.7231523669494578,
11
+ "f1_weighted": 0.7231535324344908,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.72890625,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.7392578125,
20
+ "f1": 0.7374470623959578,
21
+ "f1_weighted": 0.7374483662714908
22
+ },
23
+ {
24
+ "accuracy": 0.71337890625,
25
+ "f1": 0.7025077487164691,
26
+ "f1_weighted": 0.7024996324421906
27
+ },
28
+ {
29
+ "accuracy": 0.7275390625,
30
+ "f1": 0.7181835375736103,
31
+ "f1_weighted": 0.7181760978819921
32
+ },
33
+ {
34
+ "accuracy": 0.744140625,
35
+ "f1": 0.7441710349655026,
36
+ "f1_weighted": 0.7441865289188956
37
+ },
38
+ {
39
+ "accuracy": 0.755859375,
40
+ "f1": 0.7578381803383026,
41
+ "f1_weighted": 0.7578548320823348
42
+ },
43
+ {
44
+ "accuracy": 0.7080078125,
45
+ "f1": 0.7018243857792056,
46
+ "f1_weighted": 0.7018253776860007
47
+ },
48
+ {
49
+ "accuracy": 0.71240234375,
50
+ "f1": 0.7081317089689931,
51
+ "f1_weighted": 0.7081352680570496
52
+ },
53
+ {
54
+ "accuracy": 0.74462890625,
55
+ "f1": 0.7422437769434015,
56
+ "f1_weighted": 0.7422450161497445
57
+ },
58
+ {
59
+ "accuracy": 0.70458984375,
60
+ "f1": 0.6812758637978081,
61
+ "f1_weighted": 0.6812528526304156
62
+ },
63
+ {
64
+ "accuracy": 0.7392578125,
65
+ "f1": 0.7379003700153285,
66
+ "f1_weighted": 0.7379113522247933
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "RuReviewsClassification"
73
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/RuSTSBenchmarkSTS.json ADDED
@@ -0,0 +1,26 @@
+ {
+ "dataset_revision": "7cf24f325c6da6195df55bef3d86b5e0616f3018",
+ "evaluation_time": 35.05771613121033,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.19.2",
+ "scores": {
+ "test": [
+ {
+ "cosine_pearson": 0.8035423362397724,
+ "cosine_spearman": 0.8002815903929272,
+ "euclidean_pearson": 0.7871602467695,
+ "euclidean_spearman": 0.8002815903929272,
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.8002815903929272,
+ "manhattan_pearson": 0.7872361804712056,
+ "manhattan_spearman": 0.8006245387025261,
+ "pearson": 0.8035423362397724,
+ "spearman": 0.8002815903929272
+ }
+ ]
+ },
+ "task_name": "RuSTSBenchmarkSTS"
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/RuSciBenchGRNTIClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "673a610d6d3dd91a547a0d57ae1b56f37ebbf6a1",
3
+ "evaluation_time": 175.38222646713257,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.67353515625,
10
+ "f1": 0.6605647875373758,
11
+ "f1_weighted": 0.6606887921446307,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.67353515625,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.69287109375,
20
+ "f1": 0.6853943231009784,
21
+ "f1_weighted": 0.6855214162196541
22
+ },
23
+ {
24
+ "accuracy": 0.67333984375,
25
+ "f1": 0.6613774754925439,
26
+ "f1_weighted": 0.6614810808494207
27
+ },
28
+ {
29
+ "accuracy": 0.67529296875,
30
+ "f1": 0.6624714325077754,
31
+ "f1_weighted": 0.6626174476178033
32
+ },
33
+ {
34
+ "accuracy": 0.67333984375,
35
+ "f1": 0.6590053333191753,
36
+ "f1_weighted": 0.6591454969159511
37
+ },
38
+ {
39
+ "accuracy": 0.67822265625,
40
+ "f1": 0.6671696203557742,
41
+ "f1_weighted": 0.6672845357084765
42
+ },
43
+ {
44
+ "accuracy": 0.65869140625,
45
+ "f1": 0.6492278612017663,
46
+ "f1_weighted": 0.6492791748778237
47
+ },
48
+ {
49
+ "accuracy": 0.6806640625,
50
+ "f1": 0.6619317114695685,
51
+ "f1_weighted": 0.6621007634082923
52
+ },
53
+ {
54
+ "accuracy": 0.666015625,
55
+ "f1": 0.6511894116309452,
56
+ "f1_weighted": 0.6512824516783355
57
+ },
58
+ {
59
+ "accuracy": 0.68310546875,
60
+ "f1": 0.670518919029475,
61
+ "f1_weighted": 0.6706549028651936
62
+ },
63
+ {
64
+ "accuracy": 0.65380859375,
65
+ "f1": 0.6373617872657549,
66
+ "f1_weighted": 0.6375206513053561
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "RuSciBenchGRNTIClassification"
73
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/RuSciBenchGRNTIClusteringP2P.json ADDED
@@ -0,0 +1,34 @@
+ {
+ "dataset_revision": "673a610d6d3dd91a547a0d57ae1b56f37ebbf6a1",
+ "evaluation_time": 135.73782348632812,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.19.2",
+ "scores": {
+ "test": [
+ {
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.6252845532865546,
+ "v_measure": 0.6252845532865546,
+ "v_measure_std": 0.007482042082035663,
+ "v_measures": {
+ "Level 0": [
+ 0.6207489349333961,
+ 0.6203220132070127,
+ 0.6280279883408472,
+ 0.6356338579930063,
+ 0.6354055280425936,
+ 0.63334127065613,
+ 0.6229906966991013,
+ 0.625483492226909,
+ 0.6195526387184621,
+ 0.611339112048087
+ ]
+ }
+ }
+ ]
+ },
+ "task_name": "RuSciBenchGRNTIClusteringP2P"
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/RuSciBenchOECDClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "26c88e99dcaba32bb45d0e1bfc21902337f6d471",
3
+ "evaluation_time": 167.4153139591217,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.54384765625,
10
+ "f1": 0.5179523478811618,
11
+ "f1_weighted": 0.5179488778224148,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.54384765625,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.548828125,
20
+ "f1": 0.5239627248031066,
21
+ "f1_weighted": 0.5239497982202763
22
+ },
23
+ {
24
+ "accuracy": 0.54931640625,
25
+ "f1": 0.5271992304581379,
26
+ "f1_weighted": 0.5271890795585629
27
+ },
28
+ {
29
+ "accuracy": 0.5478515625,
30
+ "f1": 0.5219686975643086,
31
+ "f1_weighted": 0.5220594739324317
32
+ },
33
+ {
34
+ "accuracy": 0.5322265625,
35
+ "f1": 0.4922933125259625,
36
+ "f1_weighted": 0.49233519921194857
37
+ },
38
+ {
39
+ "accuracy": 0.55126953125,
40
+ "f1": 0.5237163168217409,
41
+ "f1_weighted": 0.5236558626083773
42
+ },
43
+ {
44
+ "accuracy": 0.56103515625,
45
+ "f1": 0.5399471841775259,
46
+ "f1_weighted": 0.5399500524748373
47
+ },
48
+ {
49
+ "accuracy": 0.53662109375,
50
+ "f1": 0.5037337634838176,
51
+ "f1_weighted": 0.5038136505311142
52
+ },
53
+ {
54
+ "accuracy": 0.5595703125,
55
+ "f1": 0.5426667594880292,
56
+ "f1_weighted": 0.5426221545649779
57
+ },
58
+ {
59
+ "accuracy": 0.51513671875,
60
+ "f1": 0.4886461135359976,
61
+ "f1_weighted": 0.4885845691306286
62
+ },
63
+ {
64
+ "accuracy": 0.53662109375,
65
+ "f1": 0.5153893759529918,
66
+ "f1_weighted": 0.5153289379909944
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "RuSciBenchOECDClassification"
73
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/RuSciBenchOECDClusteringP2P.json ADDED
@@ -0,0 +1,34 @@
+ {
+ "dataset_revision": "26c88e99dcaba32bb45d0e1bfc21902337f6d471",
+ "evaluation_time": 128.42090034484863,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.19.2",
+ "scores": {
+ "test": [
+ {
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.5389131877733215,
+ "v_measure": 0.5389131877733215,
+ "v_measure_std": 0.007301076799965066,
+ "v_measures": {
+ "Level 0": [
+ 0.5319407880397748,
+ 0.5351366452965421,
+ 0.5383348087959062,
+ 0.5467699170735407,
+ 0.5266030233104176,
+ 0.5361547750555381,
+ 0.550489123904105,
+ 0.5340852555806267,
+ 0.5414814687456001,
+ 0.5481360719311642
+ ]
+ }
+ }
+ ]
+ },
+ "task_name": "RuSciBenchOECDClusteringP2P"
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/STS22.json ADDED
@@ -0,0 +1,26 @@
1
+ {
2
+ "dataset_revision": "de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3",
3
+ "evaluation_time": 22.604440450668335,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "cosine_pearson": 0.5058567549374162,
10
+ "cosine_spearman": 0.6037052355031338,
11
+ "euclidean_pearson": 0.5116237069159542,
12
+ "euclidean_spearman": 0.6037052355031338,
13
+ "hf_subset": "ru",
14
+ "languages": [
15
+ "rus-Cyrl"
16
+ ],
17
+ "main_score": 0.6037052355031338,
18
+ "manhattan_pearson": 0.5174886232911461,
19
+ "manhattan_spearman": 0.6042484038697743,
20
+ "pearson": 0.5058567549374162,
21
+ "spearman": 0.6037052355031338
22
+ }
23
+ ]
24
+ },
25
+ "task_name": "STS22"
26
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/SensitiveTopicsClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "416b34a802308eac30e4192afc0ff99bb8dcc7f2",
3
+ "evaluation_time": 62.089826345443726,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.356201171875,
10
+ "f1": 0.44647781989896557,
11
+ "hf_subset": "default",
12
+ "languages": [
13
+ "rus-Cyrl"
14
+ ],
15
+ "lrap": 0.5571139865451369,
16
+ "main_score": 0.356201171875,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.37451171875,
20
+ "f1": 0.45808689195543106,
21
+ "lrap": 0.5513780381944424
22
+ },
23
+ {
24
+ "accuracy": 0.35107421875,
25
+ "f1": 0.4430090517169707,
26
+ "lrap": 0.5574951171874984
27
+ },
28
+ {
29
+ "accuracy": 0.35107421875,
30
+ "f1": 0.4124270308941054,
31
+ "lrap": 0.5223117404513843
32
+ },
33
+ {
34
+ "accuracy": 0.365234375,
35
+ "f1": 0.4369513135160103,
36
+ "lrap": 0.5480957031249971
37
+ },
38
+ {
39
+ "accuracy": 0.34130859375,
40
+ "f1": 0.46223531801542667,
41
+ "lrap": 0.5410970052083304
42
+ },
43
+ {
44
+ "accuracy": 0.34326171875,
45
+ "f1": 0.4408524141069843,
46
+ "lrap": 0.5684611002604157
47
+ },
48
+ {
49
+ "accuracy": 0.35498046875,
50
+ "f1": 0.44616716252905764,
51
+ "lrap": 0.5566880967881925
52
+ },
53
+ {
54
+ "accuracy": 0.34912109375,
55
+ "f1": 0.44250290450356894,
56
+ "lrap": 0.5585801866319432
57
+ },
58
+ {
59
+ "accuracy": 0.3662109375,
60
+ "f1": 0.4479884575085484,
61
+ "lrap": 0.5710449218749987
62
+ },
63
+ {
64
+ "accuracy": 0.365234375,
65
+ "f1": 0.47455765424355256,
66
+ "lrap": 0.5959879557291672
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "SensitiveTopicsClassification"
73
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/TERRa.json ADDED
@@ -0,0 +1,58 @@
1
+ {
2
+ "dataset_revision": "7b58f24536063837d644aab9a023c62199b2a612",
3
+ "evaluation_time": 8.901613235473633,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "dev": [
8
+ {
9
+ "cosine_accuracy": 0.6872964169381107,
10
+ "cosine_accuracy_threshold": 0.6442170143127441,
11
+ "cosine_ap": 0.6944446593807762,
12
+ "cosine_f1": 0.7163323782234956,
13
+ "cosine_f1_threshold": 0.6143864393234253,
14
+ "cosine_precision": 0.6377551020408163,
15
+ "cosine_recall": 0.8169934640522876,
16
+ "dot_accuracy": 0.6872964169381107,
17
+ "dot_accuracy_threshold": 0.6442172527313232,
18
+ "dot_ap": 0.6944446593807762,
19
+ "dot_f1": 0.7163323782234956,
20
+ "dot_f1_threshold": 0.6143864393234253,
21
+ "dot_precision": 0.6377551020408163,
22
+ "dot_recall": 0.8169934640522876,
23
+ "euclidean_accuracy": 0.6872964169381107,
24
+ "euclidean_accuracy_threshold": 0.8435434699058533,
25
+ "euclidean_ap": 0.6944446593807762,
26
+ "euclidean_f1": 0.7163323782234956,
27
+ "euclidean_f1_threshold": 0.8781953454017639,
28
+ "euclidean_precision": 0.6377551020408163,
29
+ "euclidean_recall": 0.8169934640522876,
30
+ "hf_subset": "default",
31
+ "languages": [
32
+ "rus-Cyrl"
33
+ ],
34
+ "main_score": 0.6949609366786602,
35
+ "manhattan_accuracy": 0.6905537459283387,
36
+ "manhattan_accuracy_threshold": 42.73845672607422,
37
+ "manhattan_ap": 0.6949609366786602,
38
+ "manhattan_f1": 0.7191011235955056,
39
+ "manhattan_f1_threshold": 44.631561279296875,
40
+ "manhattan_precision": 0.6305418719211823,
41
+ "manhattan_recall": 0.8366013071895425,
42
+ "max_accuracy": 0.6905537459283387,
43
+ "max_ap": 0.6949609366786602,
44
+ "max_f1": 0.7191011235955056,
45
+ "max_precision": 0.6377551020408163,
46
+ "max_recall": 0.8366013071895425,
47
+ "similarity_accuracy": 0.6872964169381107,
48
+ "similarity_accuracy_threshold": 0.6442170143127441,
49
+ "similarity_ap": 0.6944446593807762,
50
+ "similarity_f1": 0.7163323782234956,
51
+ "similarity_f1_threshold": 0.6143864393234253,
52
+ "similarity_precision": 0.6377551020408163,
53
+ "similarity_recall": 0.8169934640522876
54
+ }
55
+ ]
56
+ },
57
+ "task_name": "TERRa"
58
+ }
results/Alibaba-NLP__gte-Qwen1.5-7B-instruct/07d27e5226328010336563bc1b564a5e3436a298/model_meta.json ADDED
@@ -0,0 +1 @@
+ {"name": "Alibaba-NLP/gte-Qwen1.5-7B-instruct", "revision": "07d27e5226328010336563bc1b564a5e3436a298", "release_date": "2024-04-20", "languages": ["eng_Latn"], "n_parameters": 7720000000, "memory_usage": null, "max_tokens": 32768, "embed_dim": 4096, "license": "apache-2.0", "open_weights": true, "public_training_data": null, "public_training_code": null, "framework": ["Sentence Transformers", "PyTorch"], "reference": "https://huggingface.co/Alibaba-NLP/gte-Qwen1.5-7B-instruct", "similarity_fn_name": "cosine", "use_instructions": true, "zero_shot_benchmarks": null, "loader": "instruct_wrapper"}
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/CEDRClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "c0ba03d058e3e1b2f3fd20518875a4563dd12db4",
3
+ "evaluation_time": 37.375492095947266,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.5298087141339002,
10
+ "f1": 0.5463940656491209,
11
+ "hf_subset": "default",
12
+ "languages": [
13
+ "rus-Cyrl"
14
+ ],
15
+ "lrap": 0.7920270988310386,
16
+ "main_score": 0.5298087141339002,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.5324123273113709,
20
+ "f1": 0.5592166227592935,
21
+ "lrap": 0.8064293304994756
22
+ },
23
+ {
24
+ "accuracy": 0.5371944739638682,
25
+ "f1": 0.5142188501636664,
26
+ "lrap": 0.7502656748140369
27
+ },
28
+ {
29
+ "accuracy": 0.5159404888416578,
30
+ "f1": 0.5160018563245181,
31
+ "lrap": 0.7749468650372026
32
+ },
33
+ {
34
+ "accuracy": 0.5069075451647184,
35
+ "f1": 0.5878196176254,
36
+ "lrap": 0.8408076514346504
37
+ },
38
+ {
39
+ "accuracy": 0.5696068012752391,
40
+ "f1": 0.5584886229071322,
41
+ "lrap": 0.7752391073326336
42
+ },
43
+ {
44
+ "accuracy": 0.485653560042508,
45
+ "f1": 0.4924472291345112,
46
+ "lrap": 0.7523379383634526
47
+ },
48
+ {
49
+ "accuracy": 0.5717321997874601,
50
+ "f1": 0.5654779582641724,
51
+ "lrap": 0.8140807651434716
52
+ },
53
+ {
54
+ "accuracy": 0.5696068012752391,
55
+ "f1": 0.5883934067765482,
56
+ "lrap": 0.8068012752391147
57
+ },
58
+ {
59
+ "accuracy": 0.5223166843783209,
60
+ "f1": 0.5640478579144856,
61
+ "lrap": 0.8112646121147783
62
+ },
63
+ {
64
+ "accuracy": 0.4867162592986185,
65
+ "f1": 0.5178286346214805,
66
+ "lrap": 0.7880977683315701
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "CEDRClassification"
73
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/GeoreviewClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "3765c0d1de6b7d264bc459433c45e5a75513839c",
3
+ "evaluation_time": 44.132378578186035,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.566259765625,
10
+ "f1": 0.539998920872917,
11
+ "f1_weighted": 0.5399549345605024,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.566259765625,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.5322265625,
20
+ "f1": 0.48139864060427257,
21
+ "f1_weighted": 0.48132232760263777
22
+ },
23
+ {
24
+ "accuracy": 0.55029296875,
25
+ "f1": 0.5171318055563247,
26
+ "f1_weighted": 0.5170278436369578
27
+ },
28
+ {
29
+ "accuracy": 0.572265625,
30
+ "f1": 0.5486234952018896,
31
+ "f1_weighted": 0.5485959201625947
32
+ },
33
+ {
34
+ "accuracy": 0.58935546875,
35
+ "f1": 0.5722936933773077,
36
+ "f1_weighted": 0.5722547203160783
37
+ },
38
+ {
39
+ "accuracy": 0.56494140625,
40
+ "f1": 0.5388546516897115,
41
+ "f1_weighted": 0.5387814083242941
42
+ },
43
+ {
44
+ "accuracy": 0.56591796875,
45
+ "f1": 0.5462138892057763,
46
+ "f1_weighted": 0.5462178843455513
47
+ },
48
+ {
49
+ "accuracy": 0.59375,
50
+ "f1": 0.5773603022597097,
51
+ "f1_weighted": 0.5773082361244717
52
+ },
53
+ {
54
+ "accuracy": 0.59521484375,
55
+ "f1": 0.5751009322819041,
56
+ "f1_weighted": 0.5750472483732044
57
+ },
58
+ {
59
+ "accuracy": 0.56201171875,
60
+ "f1": 0.5491981623674013,
61
+ "f1_weighted": 0.5491716229160677
62
+ },
63
+ {
64
+ "accuracy": 0.53662109375,
65
+ "f1": 0.4938136361848744,
66
+ "f1_weighted": 0.4938221338031655
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "GeoreviewClassification"
73
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/GeoreviewClusteringP2P.json ADDED
@@ -0,0 +1,34 @@
1
+ {
2
+ "dataset_revision": "97a313c8fc85b47f13f33e7e9a95c1ad888c7fec",
3
+ "evaluation_time": 114.29212379455566,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "rus-Cyrl"
12
+ ],
13
+ "main_score": 0.7297991176715518,
14
+ "v_measure": 0.7297991176715518,
15
+ "v_measure_std": 0.007350022319590748,
16
+ "v_measures": {
17
+ "Level 0": [
18
+ 0.7279130356072366,
19
+ 0.7248414099889655,
20
+ 0.7388993666959305,
21
+ 0.7345422035157355,
22
+ 0.726717152537628,
23
+ 0.7284265796154791,
24
+ 0.7278873043269999,
25
+ 0.737814237240807,
26
+ 0.7134311856739789,
27
+ 0.7375187015127566
28
+ ]
29
+ }
30
+ }
31
+ ]
32
+ },
33
+ "task_name": "GeoreviewClusteringP2P"
34
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/HeadlineClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "2fe05ee6b5832cda29f2ef7aaad7b7fe6a3609eb",
3
+ "evaluation_time": 38.52833533287048,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.84658203125,
10
+ "f1": 0.8465626481246808,
11
+ "f1_weighted": 0.8465499669959881,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.84658203125,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.84814453125,
20
+ "f1": 0.8485091881722077,
21
+ "f1_weighted": 0.8484974187341153
22
+ },
23
+ {
24
+ "accuracy": 0.8564453125,
25
+ "f1": 0.8562652364651577,
26
+ "f1_weighted": 0.8562614677431917
27
+ },
28
+ {
29
+ "accuracy": 0.8486328125,
30
+ "f1": 0.8490051951829197,
31
+ "f1_weighted": 0.8489889619466362
32
+ },
33
+ {
34
+ "accuracy": 0.8525390625,
35
+ "f1": 0.8523523149025521,
36
+ "f1_weighted": 0.8523375830518336
37
+ },
38
+ {
39
+ "accuracy": 0.84814453125,
40
+ "f1": 0.8483947141083181,
41
+ "f1_weighted": 0.8483837276823756
42
+ },
43
+ {
44
+ "accuracy": 0.83837890625,
45
+ "f1": 0.8384119855111208,
46
+ "f1_weighted": 0.8383923360706916
47
+ },
48
+ {
49
+ "accuracy": 0.830078125,
50
+ "f1": 0.8300623133965269,
51
+ "f1_weighted": 0.8300509323362059
52
+ },
53
+ {
54
+ "accuracy": 0.849609375,
55
+ "f1": 0.8494263136535779,
56
+ "f1_weighted": 0.8494137196241152
57
+ },
58
+ {
59
+ "accuracy": 0.845703125,
60
+ "f1": 0.8454667379622213,
61
+ "f1_weighted": 0.8454593599599951
62
+ },
63
+ {
64
+ "accuracy": 0.84814453125,
65
+ "f1": 0.8477324818922055,
66
+ "f1_weighted": 0.8477141628107198
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "HeadlineClassification"
73
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/InappropriatenessClassification.json ADDED
@@ -0,0 +1,95 @@
1
+ {
2
+ "dataset_revision": "601651fdc45ef243751676e62dd7a19f491c0285",
3
+ "evaluation_time": 34.62934637069702,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.6583984375,
10
+ "ap": 0.6087707151727988,
11
+ "ap_weighted": 0.6087707151727988,
12
+ "f1": 0.6559072716864514,
13
+ "f1_weighted": 0.6559072716864514,
14
+ "hf_subset": "default",
15
+ "languages": [
16
+ "rus-Cyrl"
17
+ ],
18
+ "main_score": 0.6583984375,
19
+ "scores_per_experiment": [
20
+ {
21
+ "accuracy": 0.703125,
22
+ "ap": 0.6493566176470589,
23
+ "ap_weighted": 0.6493566176470589,
24
+ "f1": 0.7017311886064257,
25
+ "f1_weighted": 0.7017311886064257
26
+ },
27
+ {
28
+ "accuracy": 0.7041015625,
29
+ "ap": 0.6430673452524038,
30
+ "ap_weighted": 0.6430673452524038,
31
+ "f1": 0.7040835011902582,
32
+ "f1_weighted": 0.7040835011902582
33
+ },
34
+ {
35
+ "accuracy": 0.68603515625,
36
+ "ap": 0.6355622327055823,
37
+ "ap_weighted": 0.6355622327055823,
38
+ "f1": 0.6832804090024996,
39
+ "f1_weighted": 0.6832804090024996
40
+ },
41
+ {
42
+ "accuracy": 0.59716796875,
43
+ "ap": 0.557907236921408,
44
+ "ap_weighted": 0.557907236921408,
45
+ "f1": 0.5971517368897281,
46
+ "f1_weighted": 0.5971517368897281
47
+ },
48
+ {
49
+ "accuracy": 0.64501953125,
50
+ "ap": 0.5957410497572816,
51
+ "ap_weighted": 0.5957410497572816,
52
+ "f1": 0.64422142013121,
53
+ "f1_weighted": 0.64422142013121
54
+ },
55
+ {
56
+ "accuracy": 0.6181640625,
57
+ "ap": 0.5703048346742543,
58
+ "ap_weighted": 0.5703048346742543,
59
+ "f1": 0.6123881965359441,
60
+ "f1_weighted": 0.6123881965359441
61
+ },
62
+ {
63
+ "accuracy": 0.662109375,
64
+ "ap": 0.6207451834439528,
65
+ "ap_weighted": 0.6207451834439528,
66
+ "f1": 0.6521817665982426,
67
+ "f1_weighted": 0.6521817665982426
68
+ },
69
+ {
70
+ "accuracy": 0.65673828125,
71
+ "ap": 0.6002633914001305,
72
+ "ap_weighted": 0.6002633914001305,
73
+ "f1": 0.65545475017344,
74
+ "f1_weighted": 0.65545475017344
75
+ },
76
+ {
77
+ "accuracy": 0.68798828125,
78
+ "ap": 0.6360240409044715,
79
+ "ap_weighted": 0.6360240409044715,
80
+ "f1": 0.68599922979748,
81
+ "f1_weighted": 0.68599922979748
82
+ },
83
+ {
84
+ "accuracy": 0.62353515625,
85
+ "ap": 0.5787352190214441,
86
+ "ap_weighted": 0.5787352190214441,
87
+ "f1": 0.6225805179392857,
88
+ "f1_weighted": 0.6225805179392857
89
+ }
90
+ ]
91
+ }
92
+ ]
93
+ },
94
+ "task_name": "InappropriatenessClassification"
95
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/KinopoiskClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "5911f26666ac11af46cb9c6849d0dc80a378af24",
3
+ "evaluation_time": 29.96605682373047,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.6805333333333333,
10
+ "f1": 0.6573794252818923,
11
+ "f1_weighted": 0.6573794252818923,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.6805333333333333,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.678,
20
+ "f1": 0.6377280766051289,
21
+ "f1_weighted": 0.6377280766051289
22
+ },
23
+ {
24
+ "accuracy": 0.6746666666666666,
25
+ "f1": 0.6409336771730031,
26
+ "f1_weighted": 0.6409336771730032
27
+ },
28
+ {
29
+ "accuracy": 0.6866666666666666,
30
+ "f1": 0.6739421240857763,
31
+ "f1_weighted": 0.6739421240857763
32
+ },
33
+ {
34
+ "accuracy": 0.6826666666666666,
35
+ "f1": 0.6463135295490857,
36
+ "f1_weighted": 0.6463135295490857
37
+ },
38
+ {
39
+ "accuracy": 0.6713333333333333,
40
+ "f1": 0.6548412802291479,
41
+ "f1_weighted": 0.6548412802291479
42
+ },
43
+ {
44
+ "accuracy": 0.6806666666666666,
45
+ "f1": 0.6681098025787601,
46
+ "f1_weighted": 0.6681098025787601
47
+ },
48
+ {
49
+ "accuracy": 0.684,
50
+ "f1": 0.6617613091645753,
51
+ "f1_weighted": 0.6617613091645753
52
+ },
53
+ {
54
+ "accuracy": 0.6793333333333333,
55
+ "f1": 0.6670307794205573,
56
+ "f1_weighted": 0.6670307794205573
57
+ },
58
+ {
59
+ "accuracy": 0.6773333333333333,
60
+ "f1": 0.6493045481410312,
61
+ "f1_weighted": 0.6493045481410311
62
+ },
63
+ {
64
+ "accuracy": 0.6906666666666667,
65
+ "f1": 0.673829125871857,
66
+ "f1_weighted": 0.673829125871857
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "KinopoiskClassification"
73
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/MIRACLReranking.json ADDED
@@ -0,0 +1,130 @@
1
+ {
2
+ "dataset_revision": "6d1962c527217f8927fca80f890f14f36b2802af",
3
+ "evaluation_time": 1758.0768535137177,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "dev": [
8
+ {
9
+ "MAP@1(MIRACL)": 0.346,
10
+ "MAP@10(MIRACL)": 0.54471,
11
+ "MAP@100(MIRACL)": 0.56152,
12
+ "MAP@1000(MIRACL)": 0.56152,
13
+ "MAP@20(MIRACL)": 0.55649,
14
+ "MAP@3(MIRACL)": 0.47676,
15
+ "MAP@5(MIRACL)": 0.51176,
16
+ "NDCG@1(MIRACL)": 0.57578,
17
+ "NDCG@10(MIRACL)": 0.61465,
18
+ "NDCG@100(MIRACL)": 0.64874,
19
+ "NDCG@1000(MIRACL)": 0.64874,
20
+ "NDCG@20(MIRACL)": 0.63527,
21
+ "NDCG@3(MIRACL)": 0.56695,
22
+ "NDCG@5(MIRACL)": 0.58104,
23
+ "P@1(MIRACL)": 0.57578,
24
+ "P@10(MIRACL)": 0.15822,
25
+ "P@100(MIRACL)": 0.01953,
26
+ "P@1000(MIRACL)": 0.00195,
27
+ "P@20(MIRACL)": 0.08877,
28
+ "P@3(MIRACL)": 0.34937,
29
+ "P@5(MIRACL)": 0.25453,
30
+ "Recall@1(MIRACL)": 0.346,
31
+ "Recall@10(MIRACL)": 0.69763,
32
+ "Recall@100(MIRACL)": 0.79952,
33
+ "Recall@1000(MIRACL)": 0.79952,
34
+ "Recall@20(MIRACL)": 0.75403,
35
+ "Recall@3(MIRACL)": 0.53613,
36
+ "Recall@5(MIRACL)": 0.60934,
37
+ "hf_subset": "ru",
38
+ "languages": [
39
+ "rus-Cyrl"
40
+ ],
41
+ "main_score": 0.61465,
42
+ "nAUC_MAP@1000_diff1(MIRACL)": 0.05844585273238183,
43
+ "nAUC_MAP@1000_max(MIRACL)": 0.34724373194542624,
44
+ "nAUC_MAP@1000_std(MIRACL)": 0.14760716736478258,
45
+ "nAUC_MAP@100_diff1(MIRACL)": 0.05844585273238183,
46
+ "nAUC_MAP@100_max(MIRACL)": 0.34724373194542624,
47
+ "nAUC_MAP@100_std(MIRACL)": 0.14760716736478258,
48
+ "nAUC_MAP@10_diff1(MIRACL)": 0.0732187899791395,
49
+ "nAUC_MAP@10_max(MIRACL)": 0.33165340876637167,
50
+ "nAUC_MAP@10_std(MIRACL)": 0.13961470279685959,
51
+ "nAUC_MAP@1_diff1(MIRACL)": 0.26164679559463233,
52
+ "nAUC_MAP@1_max(MIRACL)": 0.1757816625864153,
53
+ "nAUC_MAP@1_std(MIRACL)": 0.03927142319306305,
54
+ "nAUC_MAP@20_diff1(MIRACL)": 0.06254085023812561,
55
+ "nAUC_MAP@20_max(MIRACL)": 0.34130843137634587,
56
+ "nAUC_MAP@20_std(MIRACL)": 0.14785631770279759,
57
+ "nAUC_MAP@3_diff1(MIRACL)": 0.12825900860644904,
58
+ "nAUC_MAP@3_max(MIRACL)": 0.27314510688262283,
59
+ "nAUC_MAP@3_std(MIRACL)": 0.09660602237538976,
60
+ "nAUC_MAP@5_diff1(MIRACL)": 0.10182116997059602,
61
+ "nAUC_MAP@5_max(MIRACL)": 0.2978782150409419,
62
+ "nAUC_MAP@5_std(MIRACL)": 0.11534764072812596,
63
+ "nAUC_NDCG@1000_diff1(MIRACL)": -0.030675764249711433,
64
+ "nAUC_NDCG@1000_max(MIRACL)": 0.4196011528049093,
65
+ "nAUC_NDCG@1000_std(MIRACL)": 0.18703000688244392,
66
+ "nAUC_NDCG@100_diff1(MIRACL)": -0.030675764249711433,
67
+ "nAUC_NDCG@100_max(MIRACL)": 0.4196011528049093,
68
+ "nAUC_NDCG@100_std(MIRACL)": 0.18703000688244392,
69
+ "nAUC_NDCG@10_diff1(MIRACL)": 0.009007660567953032,
70
+ "nAUC_NDCG@10_max(MIRACL)": 0.38840471576527,
71
+ "nAUC_NDCG@10_std(MIRACL)": 0.17528933537345287,
72
+ "nAUC_NDCG@1_diff1(MIRACL)": 0.12446653127685305,
73
+ "nAUC_NDCG@1_max(MIRACL)": 0.39998884728041634,
74
+ "nAUC_NDCG@1_std(MIRACL)": 0.14948980169454756,
75
+ "nAUC_NDCG@20_diff1(MIRACL)": -0.01594311913405895,
76
+ "nAUC_NDCG@20_max(MIRACL)": 0.40436637321725566,
77
+ "nAUC_NDCG@20_std(MIRACL)": 0.18884346313715042,
78
+ "nAUC_NDCG@3_diff1(MIRACL)": 0.0539060069241399,
79
+ "nAUC_NDCG@3_max(MIRACL)": 0.3630180722549369,
80
+ "nAUC_NDCG@3_std(MIRACL)": 0.14471287342377956,
81
+ "nAUC_NDCG@5_diff1(MIRACL)": 0.04319015283541723,
82
+ "nAUC_NDCG@5_max(MIRACL)": 0.3562966654168981,
83
+ "nAUC_NDCG@5_std(MIRACL)": 0.149283633523917,
84
+ "nAUC_P@1000_diff1(MIRACL)": -0.24754196996243197,
85
+ "nAUC_P@1000_max(MIRACL)": 0.27250891558928475,
86
+ "nAUC_P@1000_std(MIRACL)": 0.15102015255467327,
87
+ "nAUC_P@100_diff1(MIRACL)": -0.24754196996243175,
88
+ "nAUC_P@100_max(MIRACL)": 0.2725089155892854,
89
+ "nAUC_P@100_std(MIRACL)": 0.15102015255467355,
90
+ "nAUC_P@10_diff1(MIRACL)": -0.22861895405414923,
91
+ "nAUC_P@10_max(MIRACL)": 0.30596671571468875,
92
+ "nAUC_P@10_std(MIRACL)": 0.18843580349886327,
93
+ "nAUC_P@1_diff1(MIRACL)": 0.12446653127685305,
94
+ "nAUC_P@1_max(MIRACL)": 0.39998884728041634,
95
+ "nAUC_P@1_std(MIRACL)": 0.14948980169454756,
96
+ "nAUC_P@20_diff1(MIRACL)": -0.24633702067592644,
97
+ "nAUC_P@20_max(MIRACL)": 0.2868898139059664,
98
+ "nAUC_P@20_std(MIRACL)": 0.18673389653955133,
99
+ "nAUC_P@3_diff1(MIRACL)": -0.17545494495988545,
100
+ "nAUC_P@3_max(MIRACL)": 0.35260634279077574,
101
+ "nAUC_P@3_std(MIRACL)": 0.17859334573912375,
102
+ "nAUC_P@5_diff1(MIRACL)": -0.20846168857855932,
103
+ "nAUC_P@5_max(MIRACL)": 0.3233255815077814,
104
+ "nAUC_P@5_std(MIRACL)": 0.18689737668991488,
105
+ "nAUC_Recall@1000_diff1(MIRACL)": -0.33166292530333225,
106
+ "nAUC_Recall@1000_max(MIRACL)": 0.5630072103768957,
107
+ "nAUC_Recall@1000_std(MIRACL)": 0.2613299642929344,
108
+ "nAUC_Recall@100_diff1(MIRACL)": -0.33166292530333225,
109
+ "nAUC_Recall@100_max(MIRACL)": 0.5630072103768957,
110
+ "nAUC_Recall@100_std(MIRACL)": 0.2613299642929344,
111
+ "nAUC_Recall@10_diff1(MIRACL)": -0.11851664614856665,
112
+ "nAUC_Recall@10_max(MIRACL)": 0.40891368337706835,
113
+ "nAUC_Recall@10_std(MIRACL)": 0.2043278726016832,
114
+ "nAUC_Recall@1_diff1(MIRACL)": 0.26164679559463233,
115
+ "nAUC_Recall@1_max(MIRACL)": 0.1757816625864153,
116
+ "nAUC_Recall@1_std(MIRACL)": 0.03927142319306305,
117
+ "nAUC_Recall@20_diff1(MIRACL)": -0.22475522212535015,
118
+ "nAUC_Recall@20_max(MIRACL)": 0.4692190129269281,
119
+ "nAUC_Recall@20_std(MIRACL)": 0.2541055155891416,
120
+ "nAUC_Recall@3_diff1(MIRACL)": 0.05217300854332576,
121
+ "nAUC_Recall@3_max(MIRACL)": 0.2794215773373771,
122
+ "nAUC_Recall@3_std(MIRACL)": 0.10928630701331563,
123
+ "nAUC_Recall@5_diff1(MIRACL)": -0.011142669330513005,
124
+ "nAUC_Recall@5_max(MIRACL)": 0.31233821299281755,
125
+ "nAUC_Recall@5_std(MIRACL)": 0.13837108609230428
126
+ }
127
+ ]
128
+ },
129
+ "task_name": "MIRACLReranking"
130
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/MassiveIntentClassification.json ADDED
@@ -0,0 +1,137 @@
1
+ {
2
+ "dataset_revision": "4672e20407010da34463acc759c162ca9734bca6",
3
+ "evaluation_time": 210.42088508605957,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.7336583725622058,
10
+ "f1": 0.7041486844338799,
11
+ "f1_weighted": 0.7219001292766163,
12
+ "hf_subset": "ru",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.7336583725622058,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.7410894418291862,
20
+ "f1": 0.71036265711149,
21
+ "f1_weighted": 0.7297361186854204
22
+ },
23
+ {
24
+ "accuracy": 0.7471418964357768,
25
+ "f1": 0.7096025161995269,
26
+ "f1_weighted": 0.7382751725849083
27
+ },
28
+ {
29
+ "accuracy": 0.7242770679219905,
30
+ "f1": 0.6956569622589873,
31
+ "f1_weighted": 0.7138835702751832
32
+ },
33
+ {
34
+ "accuracy": 0.7501681237390719,
35
+ "f1": 0.709203702226879,
36
+ "f1_weighted": 0.7407402980045585
37
+ },
38
+ {
39
+ "accuracy": 0.7407531943510424,
40
+ "f1": 0.7087779173279923,
41
+ "f1_weighted": 0.7300548958336911
42
+ },
43
+ {
44
+ "accuracy": 0.7269670477471419,
45
+ "f1": 0.7118757006668955,
46
+ "f1_weighted": 0.7194101903409768
47
+ },
48
+ {
49
+ "accuracy": 0.7326832548755884,
50
+ "f1": 0.7001322294482387,
51
+ "f1_weighted": 0.7171119326353882
52
+ },
53
+ {
54
+ "accuracy": 0.7239408204438467,
55
+ "f1": 0.683906516901999,
56
+ "f1_weighted": 0.7110453438377231
57
+ },
58
+ {
59
+ "accuracy": 0.6997310020174848,
60
+ "f1": 0.687285247942128,
61
+ "f1_weighted": 0.6756161892733505
62
+ },
63
+ {
64
+ "accuracy": 0.7498318762609281,
65
+ "f1": 0.7246833942546619,
66
+ "f1_weighted": 0.7431275812949625
67
+ }
68
+ ]
69
+ }
70
+ ],
71
+ "validation": [
72
+ {
73
+ "accuracy": 0.7424003935071324,
74
+ "f1": 0.7002602727697711,
75
+ "f1_weighted": 0.7281309209875871,
76
+ "hf_subset": "ru",
77
+ "languages": [
78
+ "rus-Cyrl"
79
+ ],
80
+ "main_score": 0.7424003935071324,
81
+ "scores_per_experiment": [
82
+ {
83
+ "accuracy": 0.7560255779636006,
84
+ "f1": 0.7059355862386306,
85
+ "f1_weighted": 0.7436840115840425
86
+ },
87
+ {
88
+ "accuracy": 0.7570093457943925,
89
+ "f1": 0.7061836448113953,
90
+ "f1_weighted": 0.7460450031376
91
+ },
92
+ {
93
+ "accuracy": 0.7397934087555337,
94
+ "f1": 0.6973484287084937,
95
+ "f1_weighted": 0.7259273846794938
96
+ },
97
+ {
98
+ "accuracy": 0.7565174618789966,
99
+ "f1": 0.70912944118439,
100
+ "f1_weighted": 0.7420127135102002
101
+ },
102
+ {
103
+ "accuracy": 0.7456960157402853,
104
+ "f1": 0.6981902622782278,
105
+ "f1_weighted": 0.7327054921113475
106
+ },
107
+ {
108
+ "accuracy": 0.7397934087555337,
109
+ "f1": 0.705913482623903,
110
+ "f1_weighted": 0.7310716749391933
111
+ },
112
+ {
113
+ "accuracy": 0.7397934087555337,
114
+ "f1": 0.6917552987665041,
115
+ "f1_weighted": 0.7226417855732911
116
+ },
117
+ {
118
+ "accuracy": 0.7304476143630103,
119
+ "f1": 0.6915723850018337,
120
+ "f1_weighted": 0.712978333943807
121
+ },
122
+ {
123
+ "accuracy": 0.7112641416625677,
124
+ "f1": 0.6894183243112603,
125
+ "f1_weighted": 0.6864684003227117
126
+ },
127
+ {
128
+ "accuracy": 0.7476635514018691,
129
+ "f1": 0.7071558737730729,
130
+ "f1_weighted": 0.7377744100741843
131
+ }
132
+ ]
133
+ }
134
+ ]
135
+ },
136
+ "task_name": "MassiveIntentClassification"
137
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/MassiveScenarioClassification.json ADDED
@@ -0,0 +1,137 @@
1
+ {
2
+ "dataset_revision": "fad2c6e8459f9e1c45d9315f4953d921437d70f8",
3
+ "evaluation_time": 131.11218643188477,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.7830195023537324,
10
+ "f1": 0.7744343255677341,
11
+ "f1_weighted": 0.7805999340491574,
12
+ "hf_subset": "ru",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.7830195023537324,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.7948890383322125,
20
+ "f1": 0.7880340018209849,
21
+ "f1_weighted": 0.792504173343861
22
+ },
23
+ {
24
+ "accuracy": 0.7827841291190316,
25
+ "f1": 0.7717606111829581,
26
+ "f1_weighted": 0.7797041272959886
27
+ },
28
+ {
29
+ "accuracy": 0.7817753866845999,
30
+ "f1": 0.769297118280224,
31
+ "f1_weighted": 0.7754497922223242
32
+ },
33
+ {
34
+ "accuracy": 0.8016139878950908,
35
+ "f1": 0.7899288902999394,
36
+ "f1_weighted": 0.7984304407649091
37
+ },
38
+ {
39
+ "accuracy": 0.7669804976462676,
40
+ "f1": 0.7550348736787605,
41
+ "f1_weighted": 0.7627427019624383
42
+ },
43
+ {
44
+ "accuracy": 0.7632817753866846,
45
+ "f1": 0.7554274492600168,
46
+ "f1_weighted": 0.7604792463903435
47
+ },
48
+ {
49
+ "accuracy": 0.7683254875588433,
50
+ "f1": 0.757113512393905,
51
+ "f1_weighted": 0.7661627633634723
52
+ },
53
+ {
54
+ "accuracy": 0.7864828513786146,
55
+ "f1": 0.7840084528456414,
56
+ "f1_weighted": 0.788638874471122
57
+ },
58
+ {
59
+ "accuracy": 0.7868190988567586,
60
+ "f1": 0.7840096742658867,
61
+ "f1_weighted": 0.7840384417100829
62
+ },
63
+ {
64
+ "accuracy": 0.7972427706792199,
65
+ "f1": 0.7897286716490226,
66
+ "f1_weighted": 0.7978487789670328
67
+ }
68
+ ]
69
+ }
70
+ ],
71
+ "validation": [
72
+ {
73
+ "accuracy": 0.7831775700934579,
74
+ "f1": 0.7719329305602339,
75
+ "f1_weighted": 0.7811961713256883,
76
+ "hf_subset": "ru",
77
+ "languages": [
78
+ "rus-Cyrl"
79
+ ],
80
+ "main_score": 0.7831775700934579,
81
+ "scores_per_experiment": [
82
+ {
83
+ "accuracy": 0.7973438268568618,
84
+ "f1": 0.7886264490986417,
85
+ "f1_weighted": 0.7936522133640386
86
+ },
87
+ {
88
+ "accuracy": 0.7914412198721101,
89
+ "f1": 0.7792730068980979,
90
+ "f1_weighted": 0.7898653312212526
91
+ },
92
+ {
93
+ "accuracy": 0.794392523364486,
94
+ "f1": 0.7771056216258827,
95
+ "f1_weighted": 0.7906276949611155
96
+ },
97
+ {
98
+ "accuracy": 0.7870142646335465,
99
+ "f1": 0.7728106624234612,
100
+ "f1_weighted": 0.7832769961088517
101
+ },
102
+ {
103
+ "accuracy": 0.7791441219872111,
104
+ "f1": 0.7677309559227985,
105
+ "f1_weighted": 0.7763075502546062
106
+ },
107
+ {
108
+ "accuracy": 0.7575012297097885,
109
+ "f1": 0.7498870802404436,
110
+ "f1_weighted": 0.7556708468162812
111
+ },
112
+ {
113
+ "accuracy": 0.7771765863256271,
114
+ "f1": 0.7579729889438993,
115
+ "f1_weighted": 0.7762030042948803
116
+ },
117
+ {
118
+ "accuracy": 0.778160354156419,
119
+ "f1": 0.7673291581947205,
120
+ "f1_weighted": 0.7787982318100919
121
+ },
122
+ {
123
+ "accuracy": 0.778652238071815,
124
+ "f1": 0.7747785508941001,
125
+ "f1_weighted": 0.775268898607672
126
+ },
127
+ {
128
+ "accuracy": 0.7909493359567142,
129
+ "f1": 0.7838148313602934,
130
+ "f1_weighted": 0.7922909458180922
131
+ }
132
+ ]
133
+ }
134
+ ]
135
+ },
136
+ "task_name": "MassiveScenarioClassification"
137
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/RUParaPhraserSTS.json ADDED
1
+ {
2
+ "dataset_revision": "43265056790b8f7c59e0139acb4be0a8dad2c8f4",
3
+ "evaluation_time": 49.408578872680664,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "cosine_pearson": 0.6715121089349667,
10
+ "cosine_spearman": 0.7251027072326818,
11
+ "euclidean_pearson": 0.7004153159496793,
12
+ "euclidean_spearman": 0.7251032302004005,
13
+ "hf_subset": "default",
14
+ "languages": [
15
+ "rus-Cyrl"
16
+ ],
17
+ "main_score": 0.7251027072326818,
18
+ "manhattan_pearson": 0.6986309216548198,
19
+ "manhattan_spearman": 0.7233699855486297,
20
+ "pearson": 0.6715121089349667,
21
+ "spearman": 0.7251027072326818
22
+ }
23
+ ]
24
+ },
25
+ "task_name": "RUParaPhraserSTS"
26
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/RiaNewsRetrieval.json ADDED
1
+ {
2
+ "dataset_revision": "82374b0bbacda6114f39ff9c5b925fa1512ca5d7",
3
+ "evaluation_time": 10108.248116254807,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.4",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "rus-Cyrl"
12
+ ],
13
+ "main_score": 0.71889,
14
+ "map_at_1": 0.5837,
15
+ "map_at_10": 0.67673,
16
+ "map_at_100": 0.68089,
17
+ "map_at_1000": 0.68103,
18
+ "map_at_20": 0.67945,
19
+ "map_at_3": 0.6586,
20
+ "map_at_5": 0.66983,
21
+ "mrr_at_1": 0.5838,
22
+ "mrr_at_10": 0.676781626984127,
23
+ "mrr_at_100": 0.680935777612291,
24
+ "mrr_at_1000": 0.6810832150627617,
25
+ "mrr_at_20": 0.6795025166852412,
26
+ "mrr_at_3": 0.65865,
27
+ "mrr_at_5": 0.669875,
28
+ "nauc_map_at_1000_diff1": 0.6955742124142791,
29
+ "nauc_map_at_1000_max": 0.22593930010788726,
30
+ "nauc_map_at_1000_std": -0.05815150970819692,
31
+ "nauc_map_at_100_diff1": 0.6955140431724868,
32
+ "nauc_map_at_100_max": 0.2259497052936759,
33
+ "nauc_map_at_100_std": -0.05804012691261324,
34
+ "nauc_map_at_10_diff1": 0.6948486177470656,
35
+ "nauc_map_at_10_max": 0.22561832372989116,
36
+ "nauc_map_at_10_std": -0.059483960308164274,
37
+ "nauc_map_at_1_diff1": 0.7319377282977862,
38
+ "nauc_map_at_1_max": 0.2015248095554425,
39
+ "nauc_map_at_1_std": -0.07629133001341516,
40
+ "nauc_map_at_20_diff1": 0.6952100544527767,
41
+ "nauc_map_at_20_max": 0.22592622708874877,
42
+ "nauc_map_at_20_std": -0.05858117975823284,
43
+ "nauc_map_at_3_diff1": 0.6964801903913905,
44
+ "nauc_map_at_3_max": 0.22224112715863723,
45
+ "nauc_map_at_3_std": -0.06557313565196024,
46
+ "nauc_map_at_5_diff1": 0.6947038531654834,
47
+ "nauc_map_at_5_max": 0.22544684725819542,
48
+ "nauc_map_at_5_std": -0.06039937424924277,
49
+ "nauc_mrr_at_1000_diff1": 0.6954296311193227,
50
+ "nauc_mrr_at_1000_max": 0.2258481821647905,
51
+ "nauc_mrr_at_1000_std": -0.05817915886917972,
52
+ "nauc_mrr_at_100_diff1": 0.6953695397697215,
53
+ "nauc_mrr_at_100_max": 0.22585863976951523,
54
+ "nauc_mrr_at_100_std": -0.05806775271029188,
55
+ "nauc_mrr_at_10_diff1": 0.6947061160177611,
56
+ "nauc_mrr_at_10_max": 0.22552852562852763,
57
+ "nauc_mrr_at_10_std": -0.05951129286558239,
58
+ "nauc_mrr_at_1_diff1": 0.7317073952669194,
59
+ "nauc_mrr_at_1_max": 0.20137435892055278,
60
+ "nauc_mrr_at_1_std": -0.0763381827603582,
61
+ "nauc_mrr_at_20_diff1": 0.6950662871522489,
62
+ "nauc_mrr_at_20_max": 0.22583563697519174,
63
+ "nauc_mrr_at_20_std": -0.058608697931486896,
64
+ "nauc_mrr_at_3_diff1": 0.6963450244832373,
65
+ "nauc_mrr_at_3_max": 0.22215566769782727,
66
+ "nauc_mrr_at_3_std": -0.06559947397545805,
67
+ "nauc_mrr_at_5_diff1": 0.6945645041090036,
68
+ "nauc_mrr_at_5_max": 0.22535903049732317,
69
+ "nauc_mrr_at_5_std": -0.06042616106559084,
70
+ "nauc_ndcg_at_1000_diff1": 0.685333247276125,
71
+ "nauc_ndcg_at_1000_max": 0.23766253491764136,
72
+ "nauc_ndcg_at_1000_std": -0.04379543908560062,
73
+ "nauc_ndcg_at_100_diff1": 0.6836027819260406,
74
+ "nauc_ndcg_at_100_max": 0.2383513425844018,
75
+ "nauc_ndcg_at_100_std": -0.03976281101141971,
76
+ "nauc_ndcg_at_10_diff1": 0.6798803384225631,
77
+ "nauc_ndcg_at_10_max": 0.2366515620861918,
78
+ "nauc_ndcg_at_10_std": -0.04750581767516082,
79
+ "nauc_ndcg_at_1_diff1": 0.7319377282977862,
80
+ "nauc_ndcg_at_1_max": 0.2015248095554425,
81
+ "nauc_ndcg_at_1_std": -0.07629133001341516,
82
+ "nauc_ndcg_at_20_diff1": 0.6808895785463664,
83
+ "nauc_ndcg_at_20_max": 0.23817315440704365,
84
+ "nauc_ndcg_at_20_std": -0.04325208883730105,
85
+ "nauc_ndcg_at_3_diff1": 0.6845277361051741,
86
+ "nauc_ndcg_at_3_max": 0.22923162367527397,
87
+ "nauc_ndcg_at_3_std": -0.06096897758992976,
88
+ "nauc_ndcg_at_5_diff1": 0.6804573515323572,
89
+ "nauc_ndcg_at_5_max": 0.23565439508892008,
90
+ "nauc_ndcg_at_5_std": -0.05077785451053942,
91
+ "nauc_precision_at_1000_diff1": 0.49319155416100613,
92
+ "nauc_precision_at_1000_max": 0.5908260299828035,
93
+ "nauc_precision_at_1000_std": 0.35106574818768727,
94
+ "nauc_precision_at_100_diff1": 0.5528740559094314,
95
+ "nauc_precision_at_100_max": 0.4097560732478041,
96
+ "nauc_precision_at_100_std": 0.24965276739402667,
97
+ "nauc_precision_at_10_diff1": 0.6001251873425006,
98
+ "nauc_precision_at_10_max": 0.296426996029996,
99
+ "nauc_precision_at_10_std": 0.02129906392090402,
100
+ "nauc_precision_at_1_diff1": 0.7319377282977862,
101
+ "nauc_precision_at_1_max": 0.2015248095554425,
102
+ "nauc_precision_at_1_std": -0.07629133001341516,
103
+ "nauc_precision_at_20_diff1": 0.5842114816335258,
104
+ "nauc_precision_at_20_max": 0.32624326121547537,
105
+ "nauc_precision_at_20_std": 0.07690356879921746,
106
+ "nauc_precision_at_3_diff1": 0.6422094187275297,
107
+ "nauc_precision_at_3_max": 0.2539849114930569,
108
+ "nauc_precision_at_3_std": -0.044246875963812916,
109
+ "nauc_precision_at_5_diff1": 0.6213971746098673,
110
+ "nauc_precision_at_5_max": 0.2786243041491821,
111
+ "nauc_precision_at_5_std": -0.008763731908253832,
112
+ "nauc_recall_at_1000_diff1": 0.4931915541610098,
113
+ "nauc_recall_at_1000_max": 0.5908260299828074,
114
+ "nauc_recall_at_1000_std": 0.35106574818768893,
115
+ "nauc_recall_at_100_diff1": 0.5528740559094334,
116
+ "nauc_recall_at_100_max": 0.40975607324780505,
117
+ "nauc_recall_at_100_std": 0.24965276739402947,
118
+ "nauc_recall_at_10_diff1": 0.6001251873425009,
119
+ "nauc_recall_at_10_max": 0.29642699602999556,
120
+ "nauc_recall_at_10_std": 0.021299063920903794,
121
+ "nauc_recall_at_1_diff1": 0.7319377282977862,
122
+ "nauc_recall_at_1_max": 0.2015248095554425,
123
+ "nauc_recall_at_1_std": -0.07629133001341516,
124
+ "nauc_recall_at_20_diff1": 0.584211481633525,
125
+ "nauc_recall_at_20_max": 0.3262432612154751,
126
+ "nauc_recall_at_20_std": 0.07690356879921681,
127
+ "nauc_recall_at_3_diff1": 0.6422094187275298,
128
+ "nauc_recall_at_3_max": 0.2539849114930569,
129
+ "nauc_recall_at_3_std": -0.044246875963812014,
130
+ "nauc_recall_at_5_diff1": 0.621397174609867,
131
+ "nauc_recall_at_5_max": 0.27862430414918327,
132
+ "nauc_recall_at_5_std": -0.008763731908251915,
133
+ "ndcg_at_1": 0.5837,
134
+ "ndcg_at_10": 0.71889,
135
+ "ndcg_at_100": 0.73902,
136
+ "ndcg_at_1000": 0.74314,
137
+ "ndcg_at_20": 0.72872,
138
+ "ndcg_at_3": 0.68217,
139
+ "ndcg_at_5": 0.70237,
140
+ "precision_at_1": 0.5837,
141
+ "precision_at_10": 0.08497,
142
+ "precision_at_100": 0.00944,
143
+ "precision_at_1000": 0.00098,
144
+ "precision_at_20": 0.04442,
145
+ "precision_at_3": 0.25003,
146
+ "precision_at_5": 0.15982,
147
+ "recall_at_1": 0.5837,
148
+ "recall_at_10": 0.8497,
149
+ "recall_at_100": 0.9438,
150
+ "recall_at_1000": 0.9767,
151
+ "recall_at_20": 0.8884,
152
+ "recall_at_3": 0.7501,
153
+ "recall_at_5": 0.7991
154
+ }
155
+ ]
156
+ },
157
+ "task_name": "RiaNewsRetrieval"
158
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/RuBQReranking.json ADDED
1
+ {
2
+ "dataset_revision": "2e96b8f098fa4b0950fc58eacadeb31c0d0c7fa2",
3
+ "evaluation_time": 549.4406032562256,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "rus-Cyrl"
12
+ ],
13
+ "main_score": 0.7387568855586618,
14
+ "map": 0.7387568855586618,
15
+ "mrr": 0.7947627234861276,
16
+ "nAUC_map_diff1": 0.4185378603074729,
17
+ "nAUC_map_max": 0.2557231510377531,
18
+ "nAUC_map_std": 0.13128175699927613,
19
+ "nAUC_mrr_diff1": 0.47436093079736,
20
+ "nAUC_mrr_max": 0.32174764627694347,
21
+ "nAUC_mrr_std": 0.17102903008849993
22
+ }
23
+ ]
24
+ },
25
+ "task_name": "RuBQReranking"
26
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/RuBQRetrieval.json ADDED
1
+ {
2
+ "dataset_revision": "e19b6ffa60b3bc248e0b41f4cc37c26a55c2a67b",
3
+ "evaluation_time": 784.6238145828247,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "rus-Cyrl"
12
+ ],
13
+ "main_score": 0.7084,
14
+ "map_at_1": 0.43332,
15
+ "map_at_10": 0.63053,
16
+ "map_at_100": 0.63993,
17
+ "map_at_1000": 0.64026,
18
+ "map_at_20": 0.63709,
19
+ "map_at_3": 0.57886,
20
+ "map_at_5": 0.61114,
21
+ "mrr_at_1": 0.6199763593380615,
22
+ "mrr_at_10": 0.7245206199106908,
23
+ "mrr_at_100": 0.7273490906521782,
24
+ "mrr_at_1000": 0.7274286731850885,
25
+ "mrr_at_20": 0.7264379350659075,
26
+ "mrr_at_3": 0.7035066981875492,
27
+ "mrr_at_5": 0.7172182821118991,
28
+ "nauc_map_at_1000_diff1": 0.4098493844311728,
29
+ "nauc_map_at_1000_max": 0.31177707877202504,
30
+ "nauc_map_at_1000_std": -0.04566651903378437,
31
+ "nauc_map_at_100_diff1": 0.40952562105342194,
32
+ "nauc_map_at_100_max": 0.31169297709731714,
33
+ "nauc_map_at_100_std": -0.04523866869295098,
34
+ "nauc_map_at_10_diff1": 0.40711399489725486,
35
+ "nauc_map_at_10_max": 0.3108701521245131,
36
+ "nauc_map_at_10_std": -0.04967984333609782,
37
+ "nauc_map_at_1_diff1": 0.45449485733217426,
38
+ "nauc_map_at_1_max": 0.2272541734394137,
39
+ "nauc_map_at_1_std": -0.057269877241334324,
40
+ "nauc_map_at_20_diff1": 0.40831577007417696,
41
+ "nauc_map_at_20_max": 0.3114967925232305,
42
+ "nauc_map_at_20_std": -0.04601751526875259,
43
+ "nauc_map_at_3_diff1": 0.4083213088622995,
44
+ "nauc_map_at_3_max": 0.2918533799320895,
45
+ "nauc_map_at_3_std": -0.06555391990203349,
46
+ "nauc_map_at_5_diff1": 0.4033711250277714,
47
+ "nauc_map_at_5_max": 0.30257732148218364,
48
+ "nauc_map_at_5_std": -0.0631614517998428,
49
+ "nauc_mrr_at_1000_diff1": 0.4943221498869888,
50
+ "nauc_mrr_at_1000_max": 0.3613450471452896,
51
+ "nauc_mrr_at_1000_std": -0.054787367722350176,
52
+ "nauc_mrr_at_100_diff1": 0.49427485634008067,
53
+ "nauc_mrr_at_100_max": 0.3612967269520609,
54
+ "nauc_mrr_at_100_std": -0.05468894676941939,
55
+ "nauc_mrr_at_10_diff1": 0.4940433471901002,
56
+ "nauc_mrr_at_10_max": 0.3637456812378933,
57
+ "nauc_mrr_at_10_std": -0.053550809807821634,
58
+ "nauc_mrr_at_1_diff1": 0.524379377394298,
59
+ "nauc_mrr_at_1_max": 0.33024859326180706,
60
+ "nauc_mrr_at_1_std": -0.06920671552303741,
61
+ "nauc_mrr_at_20_diff1": 0.4942583856242923,
62
+ "nauc_mrr_at_20_max": 0.361520220767161,
63
+ "nauc_mrr_at_20_std": -0.05516540683278387,
64
+ "nauc_mrr_at_3_diff1": 0.48552814894456764,
65
+ "nauc_mrr_at_3_max": 0.36097653693631854,
66
+ "nauc_mrr_at_3_std": -0.056742983019184304,
67
+ "nauc_mrr_at_5_diff1": 0.4871754747833668,
68
+ "nauc_mrr_at_5_max": 0.3614729667028967,
69
+ "nauc_mrr_at_5_std": -0.057561200455925894,
70
+ "nauc_ndcg_at_1000_diff1": 0.4267257023809727,
71
+ "nauc_ndcg_at_1000_max": 0.3380181226065005,
72
+ "nauc_ndcg_at_1000_std": -0.029975086176097218,
73
+ "nauc_ndcg_at_100_diff1": 0.42047993425371843,
74
+ "nauc_ndcg_at_100_max": 0.33725479430617705,
75
+ "nauc_ndcg_at_100_std": -0.020038862668499628,
76
+ "nauc_ndcg_at_10_diff1": 0.41372994897265053,
77
+ "nauc_ndcg_at_10_max": 0.3402185054238364,
78
+ "nauc_ndcg_at_10_std": -0.032482077629039076,
79
+ "nauc_ndcg_at_1_diff1": 0.524379377394298,
80
+ "nauc_ndcg_at_1_max": 0.33024859326180706,
81
+ "nauc_ndcg_at_1_std": -0.06920671552303741,
82
+ "nauc_ndcg_at_20_diff1": 0.4157809742540088,
83
+ "nauc_ndcg_at_20_max": 0.33753169142968914,
84
+ "nauc_ndcg_at_20_std": -0.026101116562713023,
85
+ "nauc_ndcg_at_3_diff1": 0.4089658577309038,
86
+ "nauc_ndcg_at_3_max": 0.31276369584128266,
87
+ "nauc_ndcg_at_3_std": -0.05993983038697398,
88
+ "nauc_ndcg_at_5_diff1": 0.40360054168811305,
89
+ "nauc_ndcg_at_5_max": 0.32451996935897526,
90
+ "nauc_ndcg_at_5_std": -0.057496261019184904,
91
+ "nauc_precision_at_1000_diff1": -0.1039335981528995,
92
+ "nauc_precision_at_1000_max": 0.031305504944271374,
93
+ "nauc_precision_at_1000_std": 0.04291631348500297,
94
+ "nauc_precision_at_100_diff1": -0.09751197473832654,
95
+ "nauc_precision_at_100_max": 0.059270886848049154,
96
+ "nauc_precision_at_100_std": 0.08155218308146268,
97
+ "nauc_precision_at_10_diff1": -0.029614761682125797,
98
+ "nauc_precision_at_10_max": 0.1470431677304054,
99
+ "nauc_precision_at_10_std": 0.05620191569349491,
100
+ "nauc_precision_at_1_diff1": 0.524379377394298,
101
+ "nauc_precision_at_1_max": 0.33024859326180706,
102
+ "nauc_precision_at_1_std": -0.06920671552303741,
103
+ "nauc_precision_at_20_diff1": -0.06502174226456801,
104
+ "nauc_precision_at_20_max": 0.10619995806354679,
105
+ "nauc_precision_at_20_std": 0.07597531058913329,
106
+ "nauc_precision_at_3_diff1": 0.1136659799581031,
107
+ "nauc_precision_at_3_max": 0.2339666948885143,
108
+ "nauc_precision_at_3_std": -0.019247497680378353,
109
+ "nauc_precision_at_5_diff1": 0.029822992344513077,
110
+ "nauc_precision_at_5_max": 0.18940176550320303,
111
+ "nauc_precision_at_5_std": 0.0009625435506027025,
112
+ "nauc_recall_at_1000_diff1": 0.2776275954706104,
113
+ "nauc_recall_at_1000_max": 0.6315289106882999,
114
+ "nauc_recall_at_1000_std": 0.7288279620071334,
115
+ "nauc_recall_at_100_diff1": 0.18254957860285773,
116
+ "nauc_recall_at_100_max": 0.348275786234804,
117
+ "nauc_recall_at_100_std": 0.40769676520874576,
118
+ "nauc_recall_at_10_diff1": 0.28734906439784297,
119
+ "nauc_recall_at_10_max": 0.350678923614868,
120
+ "nauc_recall_at_10_std": 0.05565517533474942,
121
+ "nauc_recall_at_1_diff1": 0.45449485733217426,
122
+ "nauc_recall_at_1_max": 0.2272541734394137,
123
+ "nauc_recall_at_1_std": -0.057269877241334324,
124
+ "nauc_recall_at_20_diff1": 0.25111298116248404,
125
+ "nauc_recall_at_20_max": 0.33170412942756444,
126
+ "nauc_recall_at_20_std": 0.12211197873972823,
127
+ "nauc_recall_at_3_diff1": 0.32051875578747546,
128
+ "nauc_recall_at_3_max": 0.2803433904916248,
129
+ "nauc_recall_at_3_std": -0.04820404247875975,
130
+ "nauc_recall_at_5_diff1": 0.28342025003580124,
131
+ "nauc_recall_at_5_max": 0.30065661133125327,
132
+ "nauc_recall_at_5_std": -0.038787962818204645,
133
+ "ndcg_at_1": 0.61998,
134
+ "ndcg_at_10": 0.7084,
135
+ "ndcg_at_100": 0.73744,
136
+ "ndcg_at_1000": 0.74323,
137
+ "ndcg_at_20": 0.72441,
138
+ "ndcg_at_3": 0.63919,
139
+ "ndcg_at_5": 0.67598,
140
+ "precision_at_1": 0.61998,
141
+ "precision_at_10": 0.13712,
142
+ "precision_at_100": 0.01584,
143
+ "precision_at_1000": 0.00166,
144
+ "precision_at_20": 0.07376,
145
+ "precision_at_3": 0.34634,
146
+ "precision_at_5": 0.24173,
147
+ "recall_at_1": 0.43332,
148
+ "recall_at_10": 0.84009,
149
+ "recall_at_100": 0.95086,
150
+ "recall_at_1000": 0.98937,
151
+ "recall_at_20": 0.89235,
152
+ "recall_at_3": 0.66979,
153
+ "recall_at_5": 0.75729
154
+ }
155
+ ]
156
+ },
157
+ "task_name": "RuBQRetrieval"
158
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/RuReviewsClassification.json ADDED
1
+ {
2
+ "dataset_revision": "f6d2c31f4dc6b88f468552750bfec05b4b41b05a",
3
+ "evaluation_time": 36.28613591194153,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.698095703125,
10
+ "f1": 0.6896768789132586,
11
+ "f1_weighted": 0.6896807821830067,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.698095703125,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.7275390625,
20
+ "f1": 0.7220694639014127,
21
+ "f1_weighted": 0.7220625619911872
22
+ },
23
+ {
24
+ "accuracy": 0.65478515625,
25
+ "f1": 0.6491362610344539,
26
+ "f1_weighted": 0.6491672807222408
27
+ },
28
+ {
29
+ "accuracy": 0.7109375,
30
+ "f1": 0.6952384221984715,
31
+ "f1_weighted": 0.6952186235993608
32
+ },
33
+ {
34
+ "accuracy": 0.68408203125,
35
+ "f1": 0.6839828621347107,
36
+ "f1_weighted": 0.684040452528405
37
+ },
38
+ {
39
+ "accuracy": 0.7373046875,
40
+ "f1": 0.7395317495184309,
41
+ "f1_weighted": 0.7395381702528763
42
+ },
43
+ {
44
+ "accuracy": 0.68212890625,
45
+ "f1": 0.6788792738133856,
46
+ "f1_weighted": 0.6788869502680129
47
+ },
48
+ {
49
+ "accuracy": 0.68017578125,
50
+ "f1": 0.674774307090292,
51
+ "f1_weighted": 0.6747852833857639
52
+ },
53
+ {
54
+ "accuracy": 0.70166015625,
55
+ "f1": 0.6879877165733109,
56
+ "f1_weighted": 0.6879796687559976
57
+ },
58
+ {
59
+ "accuracy": 0.6689453125,
60
+ "f1": 0.634339983172118,
61
+ "f1_weighted": 0.6342962843130933
62
+ },
63
+ {
64
+ "accuracy": 0.7333984375,
65
+ "f1": 0.7308287496959999,
66
+ "f1_weighted": 0.730832546013129
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "RuReviewsClassification"
73
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/RuSTSBenchmarkSTS.json ADDED
1
+ {
2
+ "dataset_revision": "7cf24f325c6da6195df55bef3d86b5e0616f3018",
3
+ "evaluation_time": 33.171607971191406,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "cosine_pearson": 0.7969696535182529,
10
+ "cosine_spearman": 0.7863818865780526,
11
+ "euclidean_pearson": 0.7856831649296786,
12
+ "euclidean_spearman": 0.7863818865780526,
13
+ "hf_subset": "default",
14
+ "languages": [
15
+ "rus-Cyrl"
16
+ ],
17
+ "main_score": 0.7863818865780526,
18
+ "manhattan_pearson": 0.7868536524192116,
19
+ "manhattan_spearman": 0.7884445888242286,
20
+ "pearson": 0.7969696535182529,
21
+ "spearman": 0.7863818865780526
22
+ }
23
+ ]
24
+ },
25
+ "task_name": "RuSTSBenchmarkSTS"
26
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/RuSciBenchGRNTIClassification.json ADDED
1
+ {
2
+ "dataset_revision": "673a610d6d3dd91a547a0d57ae1b56f37ebbf6a1",
3
+ "evaluation_time": 65.67443513870239,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.6673828125,
10
+ "f1": 0.6564611808368113,
11
+ "f1_weighted": 0.6565678384778197,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.6673828125,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.68798828125,
20
+ "f1": 0.6810050944295518,
21
+ "f1_weighted": 0.6811133014806365
22
+ },
23
+ {
24
+ "accuracy": 0.66357421875,
25
+ "f1": 0.6511976944015947,
26
+ "f1_weighted": 0.6513117011798647
27
+ },
28
+ {
29
+ "accuracy": 0.65478515625,
30
+ "f1": 0.6411231743506376,
31
+ "f1_weighted": 0.6412676104371658
32
+ },
33
+ {
34
+ "accuracy": 0.67236328125,
35
+ "f1": 0.66135007733751,
36
+ "f1_weighted": 0.661463776056001
37
+ },
38
+ {
39
+ "accuracy": 0.671875,
40
+ "f1": 0.6629382495623598,
41
+ "f1_weighted": 0.6630279677814571
42
+ },
43
+ {
44
+ "accuracy": 0.66259765625,
45
+ "f1": 0.6530275032620626,
46
+ "f1_weighted": 0.6530952276222671
47
+ },
48
+ {
49
+ "accuracy": 0.67822265625,
50
+ "f1": 0.665913133789097,
51
+ "f1_weighted": 0.6660414948471616
52
+ },
53
+ {
54
+ "accuracy": 0.6591796875,
55
+ "f1": 0.6484716936319798,
56
+ "f1_weighted": 0.6485803505158273
57
+ },
58
+ {
59
+ "accuracy": 0.671875,
60
+ "f1": 0.6620798628890334,
61
+ "f1_weighted": 0.6621870228217818
62
+ },
63
+ {
64
+ "accuracy": 0.6513671875,
65
+ "f1": 0.6375053247142862,
66
+ "f1_weighted": 0.6375899320360338
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "RuSciBenchGRNTIClassification"
73
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/RuSciBenchGRNTIClusteringP2P.json ADDED
1
+ {
2
+ "dataset_revision": "673a610d6d3dd91a547a0d57ae1b56f37ebbf6a1",
3
+ "evaluation_time": 115.56734299659729,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "rus-Cyrl"
12
+ ],
13
+ "main_score": 0.6140423807896046,
14
+ "v_measure": 0.6140423807896046,
15
+ "v_measure_std": 0.00557206978100521,
16
+ "v_measures": {
17
+ "Level 0": [
18
+ 0.6127788469180457,
19
+ 0.618699078427273,
20
+ 0.606794452399866,
21
+ 0.6088767832666402,
22
+ 0.6078227897415304,
23
+ 0.6127429855670982,
24
+ 0.6256263602773254,
25
+ 0.6162643186875588,
26
+ 0.6118913377965314,
27
+ 0.6189268548141774
28
+ ]
29
+ }
30
+ }
31
+ ]
32
+ },
33
+ "task_name": "RuSciBenchGRNTIClusteringP2P"
34
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/RuSciBenchOECDClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "26c88e99dcaba32bb45d0e1bfc21902337f6d471",
3
+ "evaluation_time": 74.62318015098572,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.531494140625,
10
+ "f1": 0.5092538189981415,
11
+ "f1_weighted": 0.5092330972697919,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "rus-Cyrl"
15
+ ],
16
+ "main_score": 0.531494140625,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.53515625,
20
+ "f1": 0.5120307795631852,
21
+ "f1_weighted": 0.5120687483399415
22
+ },
23
+ {
24
+ "accuracy": 0.5322265625,
25
+ "f1": 0.5083554434216956,
26
+ "f1_weighted": 0.5083144296186541
27
+ },
28
+ {
29
+ "accuracy": 0.5439453125,
30
+ "f1": 0.527283707549943,
31
+ "f1_weighted": 0.5273521003997528
32
+ },
33
+ {
34
+ "accuracy": 0.513671875,
35
+ "f1": 0.4757018495567095,
36
+ "f1_weighted": 0.4756329470679142
37
+ },
38
+ {
39
+ "accuracy": 0.53125,
40
+ "f1": 0.5074090398183793,
41
+ "f1_weighted": 0.5074314808861585
42
+ },
43
+ {
44
+ "accuracy": 0.54296875,
45
+ "f1": 0.5268638850045184,
46
+ "f1_weighted": 0.5268322149028193
47
+ },
48
+ {
49
+ "accuracy": 0.53076171875,
50
+ "f1": 0.5021852239085048,
51
+ "f1_weighted": 0.5021507495182084
52
+ },
53
+ {
54
+ "accuracy": 0.54248046875,
55
+ "f1": 0.5325792283658487,
56
+ "f1_weighted": 0.5325468536479304
57
+ },
58
+ {
59
+ "accuracy": 0.51513671875,
60
+ "f1": 0.48825906305928374,
61
+ "f1_weighted": 0.4881892459697636
62
+ },
63
+ {
64
+ "accuracy": 0.52734375,
65
+ "f1": 0.5118699697333468,
66
+ "f1_weighted": 0.5118122023467767
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "RuSciBenchOECDClassification"
73
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/RuSciBenchOECDClusteringP2P.json ADDED
1
+ {
2
+ "dataset_revision": "26c88e99dcaba32bb45d0e1bfc21902337f6d471",
3
+ "evaluation_time": 87.11343717575073,
4
+ "kg_co2_emissions": null,
5
+ "mteb_version": "1.19.2",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "rus-Cyrl"
12
+ ],
13
+ "main_score": 0.5293031451453222,
14
+ "v_measure": 0.5293031451453222,
15
+ "v_measure_std": 0.006817383450301443,
16
+ "v_measures": {
17
+ "Level 0": [
18
+ 0.5275782837034745,
19
+ 0.5317326590619295,
20
+ 0.5272979993924329,
21
+ 0.5208098266788344,
22
+ 0.5348989142344105,
23
+ 0.5210363368983464,
24
+ 0.544040470280087,
25
+ 0.5222786185801435,
26
+ 0.5306171116181606,
27
+ 0.532741231005403
28
+ ]
29
+ }
30
+ }
31
+ ]
32
+ },
33
+ "task_name": "RuSciBenchOECDClusteringP2P"
34
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/STS22.json ADDED
@@ -0,0 +1,26 @@
+ {
+ "dataset_revision": "de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3",
+ "evaluation_time": 7.6240458488464355,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.19.2",
+ "scores": {
+ "test": [
+ {
+ "cosine_pearson": 0.5498167422150342,
+ "cosine_spearman": 0.6208272980605856,
+ "euclidean_pearson": 0.5513938985046544,
+ "euclidean_spearman": 0.6208272980605856,
+ "hf_subset": "ru",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.6208272980605856,
+ "manhattan_pearson": 0.5495505582267963,
+ "manhattan_spearman": 0.6197925120287682,
+ "pearson": 0.5498167422150342,
+ "spearman": 0.6208272980605856
+ }
+ ]
+ },
+ "task_name": "STS22"
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/SensitiveTopicsClassification.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "dataset_revision": "416b34a802308eac30e4192afc0ff99bb8dcc7f2",
+ "evaluation_time": 55.321635007858276,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.19.2",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.343212890625,
+ "f1": 0.4444830510959751,
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "lrap": 0.5518161349826368,
+ "main_score": 0.343212890625,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.34326171875,
+ "f1": 0.4201625594418228,
+ "lrap": 0.5236545138888846
+ },
+ {
+ "accuracy": 0.3056640625,
+ "f1": 0.42621370107155704,
+ "lrap": 0.5270928276909683
+ },
+ {
+ "accuracy": 0.3349609375,
+ "f1": 0.40866798099165463,
+ "lrap": 0.5254584418402736
+ },
+ {
+ "accuracy": 0.365234375,
+ "f1": 0.4658714499469487,
+ "lrap": 0.5608384874131928
+ },
+ {
+ "accuracy": 0.345703125,
+ "f1": 0.4493586128248626,
+ "lrap": 0.5564778645833316
+ },
+ {
+ "accuracy": 0.35400390625,
+ "f1": 0.458588454181207,
+ "lrap": 0.5751817491319442
+ },
+ {
+ "accuracy": 0.34423828125,
+ "f1": 0.4647278198948714,
+ "lrap": 0.5674709743923604
+ },
+ {
+ "accuracy": 0.34521484375,
+ "f1": 0.4383361383624593,
+ "lrap": 0.5567559136284704
+ },
+ {
+ "accuracy": 0.36083984375,
+ "f1": 0.4573978470370404,
+ "lrap": 0.5700819227430541
+ },
+ {
+ "accuracy": 0.3330078125,
+ "f1": 0.4555059472073264,
+ "lrap": 0.5551486545138872
+ }
+ ]
+ }
+ ]
+ },
+ "task_name": "SensitiveTopicsClassification"
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/TERRa.json ADDED
@@ -0,0 +1,58 @@
+ {
+ "dataset_revision": "7b58f24536063837d644aab9a023c62199b2a612",
+ "evaluation_time": 8.094063997268677,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.19.2",
+ "scores": {
+ "dev": [
+ {
+ "cosine_accuracy": 0.6221498371335505,
+ "cosine_accuracy_threshold": 0.7010552883148193,
+ "cosine_ap": 0.60546979190962,
+ "cosine_f1": 0.6869158878504672,
+ "cosine_f1_threshold": 0.5825852155685425,
+ "cosine_precision": 0.5345454545454545,
+ "cosine_recall": 0.9607843137254902,
+ "dot_accuracy": 0.6221498371335505,
+ "dot_accuracy_threshold": 0.7010552883148193,
+ "dot_ap": 0.60546979190962,
+ "dot_f1": 0.6869158878504672,
+ "dot_f1_threshold": 0.5825852751731873,
+ "dot_precision": 0.5345454545454545,
+ "dot_recall": 0.9607843137254902,
+ "euclidean_accuracy": 0.6221498371335505,
+ "euclidean_accuracy_threshold": 0.7732328176498413,
+ "euclidean_ap": 0.60546979190962,
+ "euclidean_f1": 0.6869158878504672,
+ "euclidean_f1_threshold": 0.9136881232261658,
+ "euclidean_precision": 0.5345454545454545,
+ "euclidean_recall": 0.9607843137254902,
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.60546979190962,
+ "manhattan_accuracy": 0.6091205211726385,
+ "manhattan_accuracy_threshold": 23.17253875732422,
+ "manhattan_ap": 0.6018193054663248,
+ "manhattan_f1": 0.6804597701149424,
+ "manhattan_f1_threshold": 27.389568328857422,
+ "manhattan_precision": 0.524822695035461,
+ "manhattan_recall": 0.9673202614379085,
+ "max_accuracy": 0.6221498371335505,
+ "max_ap": 0.60546979190962,
+ "max_f1": 0.6869158878504672,
+ "max_precision": 0.5345454545454545,
+ "max_recall": 0.9673202614379085,
+ "similarity_accuracy": 0.6221498371335505,
+ "similarity_accuracy_threshold": 0.7010552883148193,
+ "similarity_ap": 0.60546979190962,
+ "similarity_f1": 0.6869158878504672,
+ "similarity_f1_threshold": 0.5825852155685425,
+ "similarity_precision": 0.5345454545454545,
+ "similarity_recall": 0.9607843137254902
+ }
+ ]
+ },
+ "task_name": "TERRa"
+ }
results/Alibaba-NLP__gte-Qwen2-1.5B-instruct/c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd/model_meta.json ADDED
@@ -0,0 +1 @@
+ {"name": "Alibaba-NLP/gte-Qwen2-1.5B-instruct", "revision": "c6c1b92f4a3e1b92b326ad29dd3c8433457df8dd", "release_date": "2024-07-29", "languages": ["eng_Latn"], "n_parameters": 1780000000, "memory_usage": null, "max_tokens": 131072, "embed_dim": 8960, "license": "apache-2.0", "open_weights": true, "public_training_data": null, "public_training_code": null, "framework": ["Sentence Transformers", "PyTorch"], "reference": "https://huggingface.co/Alibaba-NLP/gte-Qwen2-1.5B-instruct", "similarity_fn_name": "cosine", "use_instructions": true, "zero_shot_benchmarks": null, "loader": "instruct_wrapper"}
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/CEDRClassification.json CHANGED
@@ -6,64 +6,64 @@
  "scores": {
  "test": [
  {
- "accuracy": 0.5307120085015941,
- "f1": 0.5917230343930433,
+ "accuracy": 0.5278427205100956,
+ "f1": 0.5921989224887071,
  "hf_subset": "default",
  "languages": [
  "rus-Cyrl"
  ],
- "lrap": 0.8592109458023435,
- "main_score": 0.5307120085015941,
+ "lrap": 0.8563602550478271,
+ "main_score": 0.5278427205100956,
  "scores_per_experiment": [
  {
- "accuracy": 0.5642933049946866,
- "f1": 0.6070292680114152,
- "lrap": 0.8628055260361375
+ "accuracy": 0.5472901168969182,
+ "f1": 0.6231915731616979,
+ "lrap": 0.8695005313496335
  },
  {
- "accuracy": 0.5765143464399575,
- "f1": 0.5857093544461771,
- "lrap": 0.8492029755579235
+ "accuracy": 0.502125398512221,
+ "f1": 0.5965022757364782,
+ "lrap": 0.8441551540913984
  },
  {
- "accuracy": 0.6036131774707758,
- "f1": 0.62644976226418,
- "lrap": 0.8632837407013871
+ "accuracy": 0.5308182784272051,
+ "f1": 0.5907759632792869,
+ "lrap": 0.8546758767268918
  },
  {
- "accuracy": 0.48831030818278426,
- "f1": 0.5777644438978535,
- "lrap": 0.8585547290116954
+ "accuracy": 0.5557917109458024,
+ "f1": 0.6087947463545857,
+ "lrap": 0.8740701381509085
  },
  {
- "accuracy": 0.5090329436769394,
- "f1": 0.6004348584160987,
- "lrap": 0.8719978746014928
+ "accuracy": 0.5866099893730075,
+ "f1": 0.6140356617930539,
+ "lrap": 0.8620085015940544
  },
  {
- "accuracy": 0.5302869287991498,
- "f1": 0.5946970891286132,
- "lrap": 0.8562167906482523
+ "accuracy": 0.4914984059511158,
+ "f1": 0.5860852482396078,
+ "lrap": 0.8597768331562216
  },
  {
- "accuracy": 0.5297555791710946,
- "f1": 0.5857774068968041,
- "lrap": 0.8560042507970299
+ "accuracy": 0.49415515409139216,
+ "f1": 0.5662619403508564,
+ "lrap": 0.849734325185978
  },
  {
- "accuracy": 0.5074388947927736,
- "f1": 0.5779301896872774,
- "lrap": 0.8629383634431511
+ "accuracy": 0.5377258235919234,
+ "f1": 0.5785200123327568,
+ "lrap": 0.8406482465462339
  },
  {
- "accuracy": 0.5132837407013815,
- "f1": 0.5894929342611266,
- "lrap": 0.849787460148784
+ "accuracy": 0.5292242295430393,
+ "f1": 0.6004224638445306,
+ "lrap": 0.8604675876726942
  },
  {
- "accuracy": 0.48459086078639746,
- "f1": 0.5719450369208869,
- "lrap": 0.8613177470775822
+ "accuracy": 0.5031880977683315,
+ "f1": 0.5573993397942165,
+ "lrap": 0.8485653560042566
  }
  ]
  }
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/GeoreviewClassification.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "dataset_revision": "3765c0d1de6b7d264bc459433c45e5a75513839c",
+ "evaluation_time": 453.26696705818176,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.19.0",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.600146484375,
+ "f1": 0.5785872759937656,
+ "f1_weighted": 0.5785346078310617,
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.600146484375,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.5888671875,
+ "f1": 0.561057257563129,
+ "f1_weighted": 0.560990135820034
+ },
+ {
+ "accuracy": 0.58837890625,
+ "f1": 0.554613821793309,
+ "f1_weighted": 0.5545248124700248
+ },
+ {
+ "accuracy": 0.609375,
+ "f1": 0.5876741838282947,
+ "f1_weighted": 0.5876103740996099
+ },
+ {
+ "accuracy": 0.60986328125,
+ "f1": 0.5874157051227198,
+ "f1_weighted": 0.5873556740591956
+ },
+ {
+ "accuracy": 0.5966796875,
+ "f1": 0.5765319732980858,
+ "f1_weighted": 0.5764661791524901
+ },
+ {
+ "accuracy": 0.5693359375,
+ "f1": 0.5596541863773145,
+ "f1_weighted": 0.5597054274104073
+ },
+ {
+ "accuracy": 0.61328125,
+ "f1": 0.5977133891466978,
+ "f1_weighted": 0.5976513645857018
+ },
+ {
+ "accuracy": 0.60595703125,
+ "f1": 0.5750309442619234,
+ "f1_weighted": 0.5749562602742928
+ },
+ {
+ "accuracy": 0.611328125,
+ "f1": 0.5931787267140416,
+ "f1_weighted": 0.5931291831938417
+ },
+ {
+ "accuracy": 0.6083984375,
+ "f1": 0.5930025718321394,
+ "f1_weighted": 0.5929566672450189
+ }
+ ]
+ }
+ ]
+ },
+ "task_name": "GeoreviewClassification"
+ }
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/GeoreviewClusteringP2P.json ADDED
@@ -0,0 +1,34 @@
+ {
+ "dataset_revision": "97a313c8fc85b47f13f33e7e9a95c1ad888c7fec",
+ "evaluation_time": 372.32498264312744,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.19.0",
+ "scores": {
+ "test": [
+ {
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.7831573824829378,
+ "v_measure": 0.7831573824829378,
+ "v_measure_std": 0.0031176989278815246,
+ "v_measures": {
+ "Level 0": [
+ 0.7825843607194956,
+ 0.7849570432841764,
+ 0.7891721456884293,
+ 0.7862151642070009,
+ 0.7838741734961716,
+ 0.7823980581391391,
+ 0.7778534604374112,
+ 0.7803934305692242,
+ 0.7799595239299798,
+ 0.78416646435835
+ ]
+ }
+ }
+ ]
+ },
+ "task_name": "GeoreviewClusteringP2P"
+ }
results/Alibaba-NLP__gte-Qwen2-7B-instruct/e26182b2122f4435e8b3ebecbf363990f409b45b/HeadlineClassification.json ADDED
@@ -0,0 +1,73 @@
+ {
+ "dataset_revision": "2fe05ee6b5832cda29f2ef7aaad7b7fe6a3609eb",
+ "evaluation_time": 134.56600427627563,
+ "kg_co2_emissions": null,
+ "mteb_version": "1.19.0",
+ "scores": {
+ "test": [
+ {
+ "accuracy": 0.76376953125,
+ "f1": 0.7678292655168574,
+ "f1_weighted": 0.7677914449429452,
+ "hf_subset": "default",
+ "languages": [
+ "rus-Cyrl"
+ ],
+ "main_score": 0.76376953125,
+ "scores_per_experiment": [
+ {
+ "accuracy": 0.765625,
+ "f1": 0.7712627436907281,
+ "f1_weighted": 0.7712322526359106
+ },
+ {
+ "accuracy": 0.77197265625,
+ "f1": 0.774913236114959,
+ "f1_weighted": 0.7748773489461512
+ },
+ {
+ "accuracy": 0.74951171875,
+ "f1": 0.7551607063589735,
+ "f1_weighted": 0.7551204826205611
+ },
+ {
+ "accuracy": 0.75634765625,
+ "f1": 0.762757017754281,
+ "f1_weighted": 0.7627145153671681
+ },
+ {
+ "accuracy": 0.75634765625,
+ "f1": 0.7620127891714809,
+ "f1_weighted": 0.7619696533603255
+ },
+ {
+ "accuracy": 0.75927734375,
+ "f1": 0.7643488366801355,
+ "f1_weighted": 0.7642998875782314
+ },
+ {
+ "accuracy": 0.7705078125,
+ "f1": 0.7735130202890201,
+ "f1_weighted": 0.7734671689226726
+ },
+ {
+ "accuracy": 0.76611328125,
+ "f1": 0.7671860750171898,
+ "f1_weighted": 0.7671701757864748
+ },
+ {
+ "accuracy": 0.76708984375,
+ "f1": 0.7700081895326587,
+ "f1_weighted": 0.7699714722946819
+ },
+ {
+ "accuracy": 0.77490234375,
+ "f1": 0.7771300405591465,
+ "f1_weighted": 0.7770914919172744
+ }
+ ]
+ }
+ ]
+ },
+ "task_name": "HeadlineClassification"
+ }