Muennighoff committed 59ec68e
Parent(s): ee2f697

Update README.md (#1)
- Update README.md (667a64e43e751efea937b5f3e2237c4e2a53a92c)

README.md CHANGED
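Every hunk in this change makes the same one-line addition: a `revision` field (a dataset commit hash) is inserted between `split` and `metrics` in a `model-index` task entry. A representative hunk, reassembled from the first entry of the diff (indentation is approximate, since the diff view strips it):

```diff
       name: MTEB AmazonCounterfactualClassification (en)
       config: en
       split: test
+      revision: 2d8a100785abf0ae21420d2a55b0c56e3e1ea996
       metrics:
       - type: accuracy
         value: 65.88059701492537
```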
@@ -15,6 +15,7 @@ model-index:
|
|
15 |
name: MTEB AmazonCounterfactualClassification (en)
|
16 |
config: en
|
17 |
split: test
|
|
|
18 |
metrics:
|
19 |
- type: accuracy
|
20 |
value: 65.88059701492537
|
@@ -29,6 +30,7 @@ model-index:
|
|
29 |
name: MTEB AmazonCounterfactualClassification (de)
|
30 |
config: de
|
31 |
split: test
|
|
|
32 |
metrics:
|
33 |
- type: accuracy
|
34 |
value: 59.07922912205568
|
@@ -43,6 +45,7 @@ model-index:
|
|
43 |
name: MTEB AmazonCounterfactualClassification (en-ext)
|
44 |
config: en-ext
|
45 |
split: test
|
|
|
46 |
metrics:
|
47 |
- type: accuracy
|
48 |
value: 64.91754122938531
|
@@ -57,6 +60,7 @@ model-index:
|
|
57 |
name: MTEB AmazonCounterfactualClassification (ja)
|
58 |
config: ja
|
59 |
split: test
|
|
|
60 |
metrics:
|
61 |
- type: accuracy
|
62 |
value: 56.423982869378996
|
@@ -71,6 +75,7 @@ model-index:
|
|
71 |
name: MTEB AmazonPolarityClassification
|
72 |
config: default
|
73 |
split: test
|
|
|
74 |
metrics:
|
75 |
- type: accuracy
|
76 |
value: 74.938225
|
@@ -85,6 +90,7 @@ model-index:
|
|
85 |
name: MTEB AmazonReviewsClassification (en)
|
86 |
config: en
|
87 |
split: test
|
|
|
88 |
metrics:
|
89 |
- type: accuracy
|
90 |
value: 35.098
|
@@ -97,6 +103,7 @@ model-index:
|
|
97 |
name: MTEB AmazonReviewsClassification (de)
|
98 |
config: de
|
99 |
split: test
|
|
|
100 |
metrics:
|
101 |
- type: accuracy
|
102 |
value: 24.516
|
@@ -109,6 +116,7 @@ model-index:
|
|
109 |
name: MTEB AmazonReviewsClassification (es)
|
110 |
config: es
|
111 |
split: test
|
|
|
112 |
metrics:
|
113 |
- type: accuracy
|
114 |
value: 29.097999999999995
|
@@ -121,6 +129,7 @@ model-index:
|
|
121 |
name: MTEB AmazonReviewsClassification (fr)
|
122 |
config: fr
|
123 |
split: test
|
|
|
124 |
metrics:
|
125 |
- type: accuracy
|
126 |
value: 27.395999999999997
|
@@ -133,6 +142,7 @@ model-index:
|
|
133 |
name: MTEB AmazonReviewsClassification (ja)
|
134 |
config: ja
|
135 |
split: test
|
|
|
136 |
metrics:
|
137 |
- type: accuracy
|
138 |
value: 21.724
|
@@ -145,6 +155,7 @@ model-index:
|
|
145 |
name: MTEB AmazonReviewsClassification (zh)
|
146 |
config: zh
|
147 |
split: test
|
|
|
148 |
metrics:
|
149 |
- type: accuracy
|
150 |
value: 23.976
|
@@ -157,6 +168,7 @@ model-index:
|
|
157 |
name: MTEB ArguAna
|
158 |
config: default
|
159 |
split: test
|
|
|
160 |
metrics:
|
161 |
- type: map_at_1
|
162 |
value: 13.442000000000002
|
@@ -213,6 +225,7 @@ model-index:
|
|
213 |
name: MTEB ArxivClusteringP2P
|
214 |
config: default
|
215 |
split: test
|
|
|
216 |
metrics:
|
217 |
- type: v_measure
|
218 |
value: 34.742482477870766
|
@@ -223,6 +236,7 @@ model-index:
|
|
223 |
name: MTEB ArxivClusteringS2S
|
224 |
config: default
|
225 |
split: test
|
|
|
226 |
metrics:
|
227 |
- type: v_measure
|
228 |
value: 24.67870651472156
|
@@ -233,6 +247,7 @@ model-index:
|
|
233 |
name: MTEB AskUbuntuDupQuestions
|
234 |
config: default
|
235 |
split: test
|
|
|
236 |
metrics:
|
237 |
- type: map
|
238 |
value: 52.63439984994702
|
@@ -245,6 +260,7 @@ model-index:
|
|
245 |
name: MTEB BIOSSES
|
246 |
config: default
|
247 |
split: test
|
|
|
248 |
metrics:
|
249 |
- type: cos_sim_pearson
|
250 |
value: 72.78000135012542
|
@@ -265,6 +281,7 @@ model-index:
|
|
265 |
name: MTEB BUCC (de-en)
|
266 |
config: de-en
|
267 |
split: test
|
|
|
268 |
metrics:
|
269 |
- type: accuracy
|
270 |
value: 1.0960334029227559
|
@@ -281,6 +298,7 @@ model-index:
|
|
281 |
name: MTEB BUCC (fr-en)
|
282 |
config: fr-en
|
283 |
split: test
|
|
|
284 |
metrics:
|
285 |
- type: accuracy
|
286 |
value: 0.02201188641866608
|
@@ -297,6 +315,7 @@ model-index:
|
|
297 |
name: MTEB BUCC (ru-en)
|
298 |
config: ru-en
|
299 |
split: test
|
|
|
300 |
metrics:
|
301 |
- type: accuracy
|
302 |
value: 0.0
|
@@ -313,6 +332,7 @@ model-index:
|
|
313 |
name: MTEB BUCC (zh-en)
|
314 |
config: zh-en
|
315 |
split: test
|
|
|
316 |
metrics:
|
317 |
- type: accuracy
|
318 |
value: 0.0
|
@@ -329,6 +349,7 @@ model-index:
|
|
329 |
name: MTEB Banking77Classification
|
330 |
config: default
|
331 |
split: test
|
|
|
332 |
metrics:
|
333 |
- type: accuracy
|
334 |
value: 74.67857142857142
|
@@ -341,6 +362,7 @@ model-index:
|
|
341 |
name: MTEB BiorxivClusteringP2P
|
342 |
config: default
|
343 |
split: test
|
|
|
344 |
metrics:
|
345 |
- type: v_measure
|
346 |
value: 28.93427045246491
|
@@ -351,6 +373,7 @@ model-index:
|
|
351 |
name: MTEB BiorxivClusteringS2S
|
352 |
config: default
|
353 |
split: test
|
|
|
354 |
metrics:
|
355 |
- type: v_measure
|
356 |
value: 23.080939123955474
|
@@ -361,6 +384,7 @@ model-index:
|
|
361 |
name: MTEB CQADupstackAndroidRetrieval
|
362 |
config: default
|
363 |
split: test
|
|
|
364 |
metrics:
|
365 |
- type: map_at_1
|
366 |
value: 18.221999999999998
|
@@ -417,6 +441,7 @@ model-index:
|
|
417 |
name: MTEB CQADupstackEnglishRetrieval
|
418 |
config: default
|
419 |
split: test
|
|
|
420 |
metrics:
|
421 |
- type: map_at_1
|
422 |
value: 12.058
|
@@ -473,6 +498,7 @@ model-index:
|
|
473 |
name: MTEB CQADupstackGamingRetrieval
|
474 |
config: default
|
475 |
split: test
|
|
|
476 |
metrics:
|
477 |
- type: map_at_1
|
478 |
value: 21.183
|
@@ -529,6 +555,7 @@ model-index:
|
|
529 |
name: MTEB CQADupstackGisRetrieval
|
530 |
config: default
|
531 |
split: test
|
|
|
532 |
metrics:
|
533 |
- type: map_at_1
|
534 |
value: 11.350999999999999
|
@@ -585,6 +612,7 @@ model-index:
|
|
585 |
name: MTEB CQADupstackMathematicaRetrieval
|
586 |
config: default
|
587 |
split: test
|
|
|
588 |
metrics:
|
589 |
- type: map_at_1
|
590 |
value: 8.08
|
@@ -641,6 +669,7 @@ model-index:
|
|
641 |
name: MTEB CQADupstackPhysicsRetrieval
|
642 |
config: default
|
643 |
split: test
|
|
|
644 |
metrics:
|
645 |
- type: map_at_1
|
646 |
value: 13.908999999999999
|
@@ -697,6 +726,7 @@ model-index:
|
|
697 |
name: MTEB CQADupstackProgrammersRetrieval
|
698 |
config: default
|
699 |
split: test
|
|
|
700 |
metrics:
|
701 |
- type: map_at_1
|
702 |
value: 12.598
|
@@ -753,6 +783,7 @@ model-index:
|
|
753 |
name: MTEB CQADupstackRetrieval
|
754 |
config: default
|
755 |
split: test
|
|
|
756 |
metrics:
|
757 |
- type: map_at_1
|
758 |
value: 12.738416666666666
|
@@ -809,6 +840,7 @@ model-index:
|
|
809 |
name: MTEB CQADupstackStatsRetrieval
|
810 |
config: default
|
811 |
split: test
|
|
|
812 |
metrics:
|
813 |
- type: map_at_1
|
814 |
value: 12.307
|
@@ -865,6 +897,7 @@ model-index:
|
|
865 |
name: MTEB CQADupstackTexRetrieval
|
866 |
config: default
|
867 |
split: test
|
|
|
868 |
metrics:
|
869 |
- type: map_at_1
|
870 |
value: 6.496
|
@@ -921,6 +954,7 @@ model-index:
|
|
921 |
name: MTEB CQADupstackUnixRetrieval
|
922 |
config: default
|
923 |
split: test
|
|
|
924 |
metrics:
|
925 |
- type: map_at_1
|
926 |
value: 13.843
|
@@ -977,6 +1011,7 @@ model-index:
|
|
977 |
name: MTEB CQADupstackWebmastersRetrieval
|
978 |
config: default
|
979 |
split: test
|
|
|
980 |
metrics:
|
981 |
- type: map_at_1
|
982 |
value: 13.757
|
@@ -1033,6 +1068,7 @@ model-index:
|
|
1033 |
name: MTEB CQADupstackWordpressRetrieval
|
1034 |
config: default
|
1035 |
split: test
|
|
|
1036 |
metrics:
|
1037 |
- type: map_at_1
|
1038 |
value: 9.057
|
@@ -1089,6 +1125,7 @@ model-index:
|
|
1089 |
name: MTEB ClimateFEVER
|
1090 |
config: default
|
1091 |
split: test
|
|
|
1092 |
metrics:
|
1093 |
- type: map_at_1
|
1094 |
value: 3.714
|
@@ -1145,6 +1182,7 @@ model-index:
|
|
1145 |
name: MTEB DBPedia
|
1146 |
config: default
|
1147 |
split: test
|
|
|
1148 |
metrics:
|
1149 |
- type: map_at_1
|
1150 |
value: 1.764
|
@@ -1201,6 +1239,7 @@ model-index:
|
|
1201 |
name: MTEB EmotionClassification
|
1202 |
config: default
|
1203 |
split: test
|
|
|
1204 |
metrics:
|
1205 |
- type: accuracy
|
1206 |
value: 42.225
|
@@ -1213,6 +1252,7 @@ model-index:
|
|
1213 |
name: MTEB FEVER
|
1214 |
config: default
|
1215 |
split: test
|
|
|
1216 |
metrics:
|
1217 |
- type: map_at_1
|
1218 |
value: 11.497
|
@@ -1269,6 +1309,7 @@ model-index:
|
|
1269 |
name: MTEB FiQA2018
|
1270 |
config: default
|
1271 |
split: test
|
|
|
1272 |
metrics:
|
1273 |
- type: map_at_1
|
1274 |
value: 3.637
|
@@ -1325,6 +1366,7 @@ model-index:
|
|
1325 |
name: MTEB HotpotQA
|
1326 |
config: default
|
1327 |
split: test
|
|
|
1328 |
metrics:
|
1329 |
- type: map_at_1
|
1330 |
value: 9.676
|
@@ -1381,6 +1423,7 @@ model-index:
|
|
1381 |
name: MTEB ImdbClassification
|
1382 |
config: default
|
1383 |
split: test
|
|
|
1384 |
metrics:
|
1385 |
- type: accuracy
|
1386 |
value: 62.895999999999994
|
@@ -1395,6 +1438,7 @@ model-index:
|
|
1395 |
name: MTEB MSMARCO
|
1396 |
config: default
|
1397 |
split: validation
|
|
|
1398 |
metrics:
|
1399 |
- type: map_at_1
|
1400 |
value: 2.88
|
@@ -1451,6 +1495,7 @@ model-index:
|
|
1451 |
name: MTEB MTOPDomainClassification (en)
|
1452 |
config: en
|
1453 |
split: test
|
|
|
1454 |
metrics:
|
1455 |
- type: accuracy
|
1456 |
value: 81.51846785225717
|
@@ -1463,6 +1508,7 @@ model-index:
|
|
1463 |
name: MTEB MTOPDomainClassification (de)
|
1464 |
config: de
|
1465 |
split: test
|
|
|
1466 |
metrics:
|
1467 |
- type: accuracy
|
1468 |
value: 60.37475345167653
|
@@ -1475,6 +1521,7 @@ model-index:
|
|
1475 |
name: MTEB MTOPDomainClassification (es)
|
1476 |
config: es
|
1477 |
split: test
|
|
|
1478 |
metrics:
|
1479 |
- type: accuracy
|
1480 |
value: 67.36824549699799
|
@@ -1487,6 +1534,7 @@ model-index:
|
|
1487 |
name: MTEB MTOPDomainClassification (fr)
|
1488 |
config: fr
|
1489 |
split: test
|
|
|
1490 |
metrics:
|
1491 |
- type: accuracy
|
1492 |
value: 63.12871907297212
|
@@ -1499,6 +1547,7 @@ model-index:
|
|
1499 |
name: MTEB MTOPDomainClassification (hi)
|
1500 |
config: hi
|
1501 |
split: test
|
|
|
1502 |
metrics:
|
1503 |
- type: accuracy
|
1504 |
value: 47.04553603442094
|
@@ -1511,6 +1560,7 @@ model-index:
|
|
1511 |
name: MTEB MTOPDomainClassification (th)
|
1512 |
config: th
|
1513 |
split: test
|
|
|
1514 |
metrics:
|
1515 |
- type: accuracy
|
1516 |
value: 52.282097649186255
|
@@ -1523,6 +1573,7 @@ model-index:
|
|
1523 |
name: MTEB MTOPIntentClassification (en)
|
1524 |
config: en
|
1525 |
split: test
|
|
|
1526 |
metrics:
|
1527 |
- type: accuracy
|
1528 |
value: 58.2421340629275
|
@@ -1535,6 +1586,7 @@ model-index:
|
|
1535 |
name: MTEB MTOPIntentClassification (de)
|
1536 |
config: de
|
1537 |
split: test
|
|
|
1538 |
metrics:
|
1539 |
- type: accuracy
|
1540 |
value: 45.069033530571986
|
@@ -1547,6 +1599,7 @@ model-index:
|
|
1547 |
name: MTEB MTOPIntentClassification (es)
|
1548 |
config: es
|
1549 |
split: test
|
|
|
1550 |
metrics:
|
1551 |
- type: accuracy
|
1552 |
value: 48.80920613742495
|
@@ -1559,6 +1612,7 @@ model-index:
|
|
1559 |
name: MTEB MTOPIntentClassification (fr)
|
1560 |
config: fr
|
1561 |
split: test
|
|
|
1562 |
metrics:
|
1563 |
- type: accuracy
|
1564 |
value: 44.337613529595984
|
@@ -1571,6 +1625,7 @@ model-index:
|
|
1571 |
name: MTEB MTOPIntentClassification (hi)
|
1572 |
config: hi
|
1573 |
split: test
|
|
|
1574 |
metrics:
|
1575 |
- type: accuracy
|
1576 |
value: 34.198637504481894
|
@@ -1583,6 +1638,7 @@ model-index:
|
|
1583 |
name: MTEB MTOPIntentClassification (th)
|
1584 |
config: th
|
1585 |
split: test
|
|
|
1586 |
metrics:
|
1587 |
- type: accuracy
|
1588 |
value: 43.11030741410488
|
@@ -1595,6 +1651,7 @@ model-index:
|
|
1595 |
name: MTEB MassiveIntentClassification (af)
|
1596 |
config: af
|
1597 |
split: test
|
|
|
1598 |
metrics:
|
1599 |
- type: accuracy
|
1600 |
value: 37.79421654337593
|
@@ -1607,6 +1664,7 @@ model-index:
|
|
1607 |
name: MTEB MassiveIntentClassification (am)
|
1608 |
config: am
|
1609 |
split: test
|
|
|
1610 |
metrics:
|
1611 |
- type: accuracy
|
1612 |
value: 23.722259583053127
|
@@ -1619,6 +1677,7 @@ model-index:
|
|
1619 |
name: MTEB MassiveIntentClassification (ar)
|
1620 |
config: ar
|
1621 |
split: test
|
|
|
1622 |
metrics:
|
1623 |
- type: accuracy
|
1624 |
value: 29.64021519838601
|
@@ -1631,6 +1690,7 @@ model-index:
|
|
1631 |
name: MTEB MassiveIntentClassification (az)
|
1632 |
config: az
|
1633 |
split: test
|
|
|
1634 |
metrics:
|
1635 |
- type: accuracy
|
1636 |
value: 39.4754539340955
|
@@ -1643,6 +1703,7 @@ model-index:
|
|
1643 |
name: MTEB MassiveIntentClassification (bn)
|
1644 |
config: bn
|
1645 |
split: test
|
|
|
1646 |
metrics:
|
1647 |
- type: accuracy
|
1648 |
value: 26.550100874243444
|
@@ -1655,6 +1716,7 @@ model-index:
|
|
1655 |
name: MTEB MassiveIntentClassification (cy)
|
1656 |
config: cy
|
1657 |
split: test
|
|
|
1658 |
metrics:
|
1659 |
- type: accuracy
|
1660 |
value: 38.78278412911904
|
@@ -1667,6 +1729,7 @@ model-index:
|
|
1667 |
name: MTEB MassiveIntentClassification (da)
|
1668 |
config: da
|
1669 |
split: test
|
|
|
1670 |
metrics:
|
1671 |
- type: accuracy
|
1672 |
value: 43.557498318762605
|
@@ -1679,6 +1742,7 @@ model-index:
|
|
1679 |
name: MTEB MassiveIntentClassification (de)
|
1680 |
config: de
|
1681 |
split: test
|
|
|
1682 |
metrics:
|
1683 |
- type: accuracy
|
1684 |
value: 40.39340954942838
|
@@ -1691,6 +1755,7 @@ model-index:
|
|
1691 |
name: MTEB MassiveIntentClassification (el)
|
1692 |
config: el
|
1693 |
split: test
|
|
|
1694 |
metrics:
|
1695 |
- type: accuracy
|
1696 |
value: 37.28648285137861
|
@@ -1703,6 +1768,7 @@ model-index:
|
|
1703 |
name: MTEB MassiveIntentClassification (en)
|
1704 |
config: en
|
1705 |
split: test
|
|
|
1706 |
metrics:
|
1707 |
- type: accuracy
|
1708 |
value: 58.080026899798256
|
@@ -1715,6 +1781,7 @@ model-index:
|
|
1715 |
name: MTEB MassiveIntentClassification (es)
|
1716 |
config: es
|
1717 |
split: test
|
|
|
1718 |
metrics:
|
1719 |
- type: accuracy
|
1720 |
value: 41.176866173503704
|
@@ -1727,6 +1794,7 @@ model-index:
|
|
1727 |
name: MTEB MassiveIntentClassification (fa)
|
1728 |
config: fa
|
1729 |
split: test
|
|
|
1730 |
metrics:
|
1731 |
- type: accuracy
|
1732 |
value: 36.422326832548755
|
@@ -1739,6 +1807,7 @@ model-index:
|
|
1739 |
name: MTEB MassiveIntentClassification (fi)
|
1740 |
config: fi
|
1741 |
split: test
|
|
|
1742 |
metrics:
|
1743 |
- type: accuracy
|
1744 |
value: 38.75588433086752
|
@@ -1751,6 +1820,7 @@ model-index:
|
|
1751 |
name: MTEB MassiveIntentClassification (fr)
|
1752 |
config: fr
|
1753 |
split: test
|
|
|
1754 |
metrics:
|
1755 |
- type: accuracy
|
1756 |
value: 43.67182246133153
|
@@ -1763,6 +1833,7 @@ model-index:
|
|
1763 |
name: MTEB MassiveIntentClassification (he)
|
1764 |
config: he
|
1765 |
split: test
|
|
|
1766 |
metrics:
|
1767 |
- type: accuracy
|
1768 |
value: 31.980497646267658
|
@@ -1775,6 +1846,7 @@ model-index:
|
|
1775 |
name: MTEB MassiveIntentClassification (hi)
|
1776 |
config: hi
|
1777 |
split: test
|
|
|
1778 |
metrics:
|
1779 |
- type: accuracy
|
1780 |
value: 28.039677202420982
|
@@ -1787,6 +1859,7 @@ model-index:
|
|
1787 |
name: MTEB MassiveIntentClassification (hu)
|
1788 |
config: hu
|
1789 |
split: test
|
|
|
1790 |
metrics:
|
1791 |
- type: accuracy
|
1792 |
value: 38.13718897108272
|
@@ -1799,6 +1872,7 @@ model-index:
|
|
1799 |
name: MTEB MassiveIntentClassification (hy)
|
1800 |
config: hy
|
1801 |
split: test
|
|
|
1802 |
metrics:
|
1803 |
- type: accuracy
|
1804 |
value: 26.05245460659045
|
@@ -1811,6 +1885,7 @@ model-index:
|
|
1811 |
name: MTEB MassiveIntentClassification (id)
|
1812 |
config: id
|
1813 |
split: test
|
|
|
1814 |
metrics:
|
1815 |
- type: accuracy
|
1816 |
value: 41.156691324815064
|
@@ -1823,6 +1898,7 @@ model-index:
|
|
1823 |
name: MTEB MassiveIntentClassification (is)
|
1824 |
config: is
|
1825 |
split: test
|
|
|
1826 |
metrics:
|
1827 |
- type: accuracy
|
1828 |
value: 38.62811028917284
|
@@ -1835,6 +1911,7 @@ model-index:
|
|
1835 |
name: MTEB MassiveIntentClassification (it)
|
1836 |
config: it
|
1837 |
split: test
|
|
|
1838 |
metrics:
|
1839 |
- type: accuracy
|
1840 |
value: 44.0383322125084
|
@@ -1847,6 +1924,7 @@ model-index:
|
|
1847 |
name: MTEB MassiveIntentClassification (ja)
|
1848 |
config: ja
|
1849 |
split: test
|
|
|
1850 |
metrics:
|
1851 |
- type: accuracy
|
1852 |
value: 46.20712844653666
|
@@ -1859,6 +1937,7 @@ model-index:
|
|
1859 |
name: MTEB MassiveIntentClassification (jv)
|
1860 |
config: jv
|
1861 |
split: test
|
|
|
1862 |
metrics:
|
1863 |
- type: accuracy
|
1864 |
value: 37.60591795561533
|
@@ -1871,6 +1950,7 @@ model-index:
|
|
1871 |
name: MTEB MassiveIntentClassification (ka)
|
1872 |
config: ka
|
1873 |
split: test
|
|
|
1874 |
metrics:
|
1875 |
- type: accuracy
|
1876 |
value: 24.47209145931405
|
@@ -1883,6 +1963,7 @@ model-index:
|
|
1883 |
name: MTEB MassiveIntentClassification (km)
|
1884 |
config: km
|
1885 |
split: test
|
|
|
1886 |
metrics:
|
1887 |
- type: accuracy
|
1888 |
value: 26.23739071956961
|
@@ -1895,6 +1976,7 @@ model-index:
|
|
1895 |
name: MTEB MassiveIntentClassification (kn)
|
1896 |
config: kn
|
1897 |
split: test
|
|
|
1898 |
metrics:
|
1899 |
- type: accuracy
|
1900 |
value: 17.831203765971754
|
@@ -1907,6 +1989,7 @@ model-index:
|
|
1907 |
name: MTEB MassiveIntentClassification (ko)
|
1908 |
config: ko
|
1909 |
split: test
|
|
|
1910 |
metrics:
|
1911 |
- type: accuracy
|
1912 |
value: 37.266308002689975
|
@@ -1919,6 +2002,7 @@ model-index:
|
|
1919 |
name: MTEB MassiveIntentClassification (lv)
|
1920 |
config: lv
|
1921 |
split: test
|
|
|
1922 |
metrics:
|
1923 |
- type: accuracy
|
1924 |
value: 40.93140551445864
|
@@ -1931,6 +2015,7 @@ model-index:
|
|
1931 |
name: MTEB MassiveIntentClassification (ml)
|
1932 |
config: ml
|
1933 |
split: test
|
|
|
1934 |
metrics:
|
1935 |
- type: accuracy
|
1936 |
value: 17.88500336247478
|
@@ -1943,6 +2028,7 @@ model-index:
|
|
1943 |
name: MTEB MassiveIntentClassification (mn)
|
1944 |
config: mn
|
1945 |
split: test
|
|
|
1946 |
metrics:
|
1947 |
- type: accuracy
|
1948 |
value: 32.975790181573636
|
@@ -1955,6 +2041,7 @@ model-index:
|
|
1955 |
name: MTEB MassiveIntentClassification (ms)
|
1956 |
config: ms
|
1957 |
split: test
|
|
|
1958 |
metrics:
|
1959 |
- type: accuracy
|
1960 |
value: 40.91123066577001
|
@@ -1967,6 +2054,7 @@ model-index:
|
|
1967 |
name: MTEB MassiveIntentClassification (my)
|
1968 |
config: my
|
1969 |
split: test
|
|
|
1970 |
metrics:
|
1971 |
- type: accuracy
|
1972 |
value: 17.834566240753194
|
@@ -1979,6 +2067,7 @@ model-index:
|
|
1979 |
name: MTEB MassiveIntentClassification (nb)
|
1980 |
config: nb
|
1981 |
split: test
|
|
|
1982 |
metrics:
|
1983 |
- type: accuracy
|
1984 |
value: 39.47881640887693
|
@@ -1991,6 +2080,7 @@ model-index:
|
|
1991 |
name: MTEB MassiveIntentClassification (nl)
|
1992 |
config: nl
|
1993 |
split: test
|
|
|
1994 |
metrics:
|
1995 |
- type: accuracy
|
1996 |
value: 41.76193678547412
|
@@ -2003,6 +2093,7 @@ model-index:
|
|
2003 |
name: MTEB MassiveIntentClassification (pl)
|
2004 |
config: pl
|
2005 |
split: test
|
|
|
2006 |
metrics:
|
2007 |
- type: accuracy
|
2008 |
value: 42.61936785474109
|
@@ -2015,6 +2106,7 @@ model-index:
|
|
2015 |
name: MTEB MassiveIntentClassification (pt)
|
2016 |
config: pt
|
2017 |
split: test
|
|
|
2018 |
metrics:
|
2019 |
- type: accuracy
|
2020 |
value: 44.54270342972427
|
@@ -2027,6 +2119,7 @@ model-index:
|
|
2027 |
name: MTEB MassiveIntentClassification (ro)
|
2028 |
config: ro
|
2029 |
split: test
|
|
|
2030 |
metrics:
|
2031 |
- type: accuracy
|
2032 |
value: 39.96973772696705
|
@@ -2039,6 +2132,7 @@ model-index:
|
|
2039 |
name: MTEB MassiveIntentClassification (ru)
|
2040 |
config: ru
|
2041 |
split: test
|
|
|
2042 |
metrics:
|
2043 |
- type: accuracy
|
2044 |
value: 37.461331540013454
|
@@ -2051,6 +2145,7 @@ model-index:
|
|
2051 |
name: MTEB MassiveIntentClassification (sl)
|
2052 |
config: sl
|
2053 |
split: test
|
|
|
2054 |
metrics:
|
2055 |
- type: accuracy
|
2056 |
value: 38.28850033624748
|
@@ -2063,6 +2158,7 @@ model-index:
|
|
2063 |
name: MTEB MassiveIntentClassification (sq)
|
2064 |
config: sq
|
2065 |
split: test
|
|
|
2066 |
metrics:
|
2067 |
- type: accuracy
|
2068 |
value: 40.95494283792872
|
@@ -2075,6 +2171,7 @@ model-index:
|
|
2075 |
name: MTEB MassiveIntentClassification (sv)
|
2076 |
config: sv
|
2077 |
split: test
|
|
|
2078 |
metrics:
|
2079 |
- type: accuracy
|
2080 |
value: 41.85272360457296
|
@@ -2087,6 +2184,7 @@ model-index:
|
|
2087 |
name: MTEB MassiveIntentClassification (sw)
|
2088 |
config: sw
|
2089 |
split: test
|
|
|
2090 |
metrics:
|
2091 |
- type: accuracy
|
2092 |
value: 38.328850033624754
|
@@ -2099,6 +2197,7 @@ model-index:
|
|
2099 |
name: MTEB MassiveIntentClassification (ta)
|
2100 |
config: ta
|
2101 |
split: test
|
|
|
2102 |
metrics:
|
2103 |
- type: accuracy
|
2104 |
value: 19.031607262945528
|
@@ -2111,6 +2210,7 @@ model-index:
|
|
2111 |
name: MTEB MassiveIntentClassification (te)
|
2112 |
config: te
|
2113 |
split: test
|
|
|
2114 |
metrics:
|
2115 |
- type: accuracy
|
2116 |
value: 19.38466711499664
|
@@ -2123,6 +2223,7 @@ model-index:
|
|
2123 |
name: MTEB MassiveIntentClassification (th)
|
2124 |
config: th
|
2125 |
split: test
|
|
|
2126 |
metrics:
|
2127 |
- type: accuracy
|
2128 |
value: 34.088769334229994
|
@@ -2135,6 +2236,7 @@ model-index:
|
|
2135 |
name: MTEB MassiveIntentClassification (tl)
|
2136 |
config: tl
|
2137 |
split: test
|
|
|
2138 |
metrics:
|
2139 |
- type: accuracy
|
2140 |
value: 40.285810356422324
|
@@ -2147,6 +2249,7 @@ model-index:
|
|
2147 |
name: MTEB MassiveIntentClassification (tr)
|
2148 |
config: tr
|
2149 |
split: test
|
|
|
2150 |
metrics:
|
2151 |
- type: accuracy
|
2152 |
value: 38.860121049092136
|
@@ -2159,6 +2262,7 @@ model-index:
|
|
2159 |
name: MTEB MassiveIntentClassification (ur)
|
2160 |
config: ur
|
2161 |
split: test
|
|
|
2162 |
metrics:
|
2163 |
- type: accuracy
|
2164 |
value: 27.834566240753194
|
@@ -2171,6 +2275,7 @@ model-index:
|
|
2171 |
name: MTEB MassiveIntentClassification (vi)
|
2172 |
config: vi
|
2173 |
split: test
|
|
|
2174 |
metrics:
|
2175 |
- type: accuracy
|
2176 |
value: 38.70544720914593
|
@@ -2183,6 +2288,7 @@ model-index:
|
|
2183 |
name: MTEB MassiveIntentClassification (zh-CN)
|
2184 |
config: zh-CN
|
2185 |
split: test
|
|
|
2186 |
metrics:
|
2187 |
- type: accuracy
|
2188 |
value: 45.78009414929387
|
@@ -2195,6 +2301,7 @@ model-index:
|
|
2195 |
name: MTEB MassiveIntentClassification (zh-TW)
|
2196 |
config: zh-TW
|
2197 |
split: test
|
|
|
2198 |
metrics:
|
2199 |
- type: accuracy
|
2200 |
value: 42.32010759919301
|
@@ -2207,6 +2314,7 @@ model-index:
|
|
2207 |
name: MTEB MassiveScenarioClassification (af)
|
2208 |
config: af
|
2209 |
split: test
|
|
|
2210 |
metrics:
|
2211 |
- type: accuracy
|
2212 |
value: 40.24546065904506
|
@@ -2219,6 +2327,7 @@ model-index:
|
|
2219 |
name: MTEB MassiveScenarioClassification (am)
|
2220 |
config: am
|
2221 |
split: test
|
|
|
2222 |
metrics:
|
2223 |
- type: accuracy
|
2224 |
value: 25.68930733019502
|
@@ -2231,6 +2340,7 @@ model-index:
|
|
2231 |
name: MTEB MassiveScenarioClassification (ar)
|
2232 |
config: ar
|
2233 |
split: test
|
|
|
2234 |
metrics:
|
2235 |
- type: accuracy
|
2236 |
value: 32.39744451916611
|
@@ -2243,6 +2353,7 @@ model-index:
|
|
2243 |
name: MTEB MassiveScenarioClassification (az)
|
2244 |
config: az
|
2245 |
split: test
|
|
|
2246 |
metrics:
|
2247 |
- type: accuracy
|
2248 |
value: 40.53127101546738
|
@@ -2255,6 +2366,7 @@ model-index:
|
|
2255 |
name: MTEB MassiveScenarioClassification (bn)
|
2256 |
config: bn
|
2257 |
split: test
|
|
|
2258 |
metrics:
|
2259 |
- type: accuracy
|
2260 |
value: 27.23268325487559
|
@@ -2267,6 +2379,7 @@ model-index:
|
|
2267 |
name: MTEB MassiveScenarioClassification (cy)
|
2268 |
config: cy
|
2269 |
split: test
|
|
|
2270 |
metrics:
|
2271 |
- type: accuracy
|
2272 |
value: 38.69872225958305
|
@@ -2279,6 +2392,7 @@ model-index:
|
|
2279 |
name: MTEB MassiveScenarioClassification (da)
|
2280 |
config: da
|
2281 |
split: test
|
|
|
2282 |
metrics:
|
2283 |
- type: accuracy
|
2284 |
value: 44.75453934095494
|
@@ -2291,6 +2405,7 @@ model-index:
|
|
2291 |
name: MTEB MassiveScenarioClassification (de)
|
2292 |
config: de
|
2293 |
split: test
|
|
|
2294 |
metrics:
|
2295 |
- type: accuracy
|
2296 |
value: 41.355077336919976
|
@@ -2303,6 +2418,7 @@ model-index:
|
|
2303 |
name: MTEB MassiveScenarioClassification (el)
|
2304 |
config: el
|
2305 |
split: test
|
|
|
2306 |
metrics:
|
2307 |
- type: accuracy
|
2308 |
value: 38.43981170141224
|
@@ -2315,6 +2431,7 @@ model-index:
|
|
2315 |
name: MTEB MassiveScenarioClassification (en)
|
2316 |
config: en
|
2317 |
split: test
|
|
|
2318 |
metrics:
|
2319 |
- type: accuracy
|
2320 |
value: 66.33826496301278
|
@@ -2327,6 +2444,7 @@ model-index:
|
|
2327 |
name: MTEB MassiveScenarioClassification (es)
|
2328 |
config: es
|
2329 |
split: test
|
|
|
2330 |
metrics:
|
2331 |
- type: accuracy
|
2332 |
value: 44.17955615332885
|
@@ -2339,6 +2457,7 @@ model-index:
|
|
2339 |
name: MTEB MassiveScenarioClassification (fa)
|
2340 |
config: fa
|
2341 |
split: test
|
|
|
2342 |
metrics:
|
2343 |
- type: accuracy
|
2344 |
value: 34.82851378614661
|
@@ -2351,6 +2470,7 @@ model-index:
|
|
2351 |
name: MTEB MassiveScenarioClassification (fi)
|
2352 |
config: fi
|
2353 |
split: test
|
|
|
2354 |
metrics:
|
2355 |
- type: accuracy
|
2356 |
value: 40.561533288500335
|
@@ -2363,6 +2483,7 @@ model-index:
|
|
2363 |
name: MTEB MassiveScenarioClassification (fr)
|
2364 |
config: fr
|
2365 |
split: test
|
|
|
2366 |
metrics:
|
2367 |
- type: accuracy
|
2368 |
value: 45.917955615332886
|
@@ -2375,6 +2496,7 @@ model-index:
|
|
2375 |
name: MTEB MassiveScenarioClassification (he)
|
2376 |
config: he
|
2377 |
split: test
|
|
|
2378 |
metrics:
|
2379 |
- type: accuracy
|
2380 |
value: 32.08473436449227
|
@@ -2387,6 +2509,7 @@ model-index:
|
|
2387 |
name: MTEB MassiveScenarioClassification (hi)
|
2388 |
config: hi
|
2389 |
split: test
|
|
|
2390 |
metrics:
|
2391 |
- type: accuracy
|
2392 |
value: 28.369199731002016
|
@@ -2399,6 +2522,7 @@ model-index:
|
|
2399 |
name: MTEB MassiveScenarioClassification (hu)
|
2400 |
config: hu
|
2401 |
split: test
|
|
|
2402 |
metrics:
|
2403 |
- type: accuracy
|
2404 |
value: 39.49226630800269
|
@@ -2411,6 +2535,7 @@ model-index:
|
|
2411 |
name: MTEB MassiveScenarioClassification (hy)
|
2412 |
config: hy
|
2413 |
split: test
|
|
|
2414 |
metrics:
|
2415 |
- type: accuracy
|
2416 |
value: 25.904505716207133
|
@@ -2423,6 +2548,7 @@ model-index:
|
|
2423 |
name: MTEB MassiveScenarioClassification (id)
|
2424 |
config: id
|
2425 |
split: test
|
|
|
2426 |
metrics:
|
2427 |
- type: accuracy
|
2428 |
value: 40.95830531271016
|
@@ -2435,6 +2561,7 @@ model-index:
|
|
2435 |
name: MTEB MassiveScenarioClassification (is)
|
2436 |
config: is
|
2437 |
split: test
|
|
|
2438 |
metrics:
|
2439 |
- type: accuracy
|
2440 |
value: 38.564223268325485
|
@@ -2447,6 +2574,7 @@ model-index:
|
|
2447 |
name: MTEB MassiveScenarioClassification (it)
|
2448 |
config: it
|
2449 |
split: test
|
|
|
2450 |
metrics:
|
2451 |
- type: accuracy
|
2452 |
value: 46.58708809683928
|
@@ -2459,6 +2587,7 @@ model-index:
|
|
2459 |
name: MTEB MassiveScenarioClassification (ja)
|
2460 |
config: ja
|
2461 |
split: test
|
|
|
2462 |
metrics:
|
2463 |
- type: accuracy
|
2464 |
value: 46.24747814391393
|
@@ -2471,6 +2600,7 @@ model-index:
|
|
2471 |
name: MTEB MassiveScenarioClassification (jv)
|
2472 |
config: jv
|
2473 |
split: test
|
|
|
2474 |
metrics:
|
2475 |
- type: accuracy
|
2476 |
value: 39.6570275722932
|
@@ -2483,6 +2613,7 @@ model-index:
|
|
2483 |
name: MTEB MassiveScenarioClassification (ka)
|
2484 |
config: ka
|
2485 |
split: test
|
|
|
2486 |
metrics:
|
2487 |
- type: accuracy
|
2488 |
value: 25.279085406859448
|
@@ -2495,6 +2626,7 @@ model-index:
|
|
2495 |
name: MTEB MassiveScenarioClassification (km)
|
2496 |
config: km
|
2497 |
split: test
|
|
|
2498 |
metrics:
|
2499 |
- type: accuracy
|
2500 |
value: 28.97108271687962
|
@@ -2507,6 +2639,7 @@ model-index:
|
|
2507 |
name: MTEB MassiveScenarioClassification (kn)
|
2508 |
config: kn
|
2509 |
split: test
|
|
|
2510 |
metrics:
|
2511 |
- type: accuracy
|
2512 |
value: 19.27370544720915
|
@@ -2519,6 +2652,7 @@ model-index:
|
|
2519 |
name: MTEB MassiveScenarioClassification (ko)
|
2520 |
config: ko
|
2521 |
split: test
|
|
|
2522 |
metrics:
|
2523 |
- type: accuracy
|
2524 |
value: 35.729657027572294
|
@@ -2531,6 +2665,7 @@ model-index:
|
|
2531 |
name: MTEB MassiveScenarioClassification (lv)
|
2532 |
config: lv
|
2533 |
split: test
|
|
|
2534 |
metrics:
|
2535 |
- type: accuracy
|
2536 |
value: 39.57296570275723
|
@@ -2543,6 +2678,7 @@ model-index:
|
|
2543 |
name: MTEB MassiveScenarioClassification (ml)
|
2544 |
config: ml
|
2545 |
split: test
|
|
|
2546 |
metrics:
|
2547 |
- type: accuracy
|
2548 |
value: 19.895763281775388
|
@@ -2555,6 +2691,7 @@ model-index:
|
|
2555 |
name: MTEB MassiveScenarioClassification (mn)
|
2556 |
config: mn
|
2557 |
split: test
|
|
|
2558 |
metrics:
|
2559 |
- type: accuracy
|
2560 |
value: 32.431069266980494
|
@@ -2567,6 +2704,7 @@ model-index:
|
|
2567 |
name: MTEB MassiveScenarioClassification (ms)
|
2568 |
config: ms
|
2569 |
split: test
|
|
|
2570 |
metrics:
|
2571 |
- type: accuracy
|
2572 |
value: 42.32347007397445
|
@@ -2579,6 +2717,7 @@ model-index:
|
|
2579 |
name: MTEB MassiveScenarioClassification (my)
|
2580 |
config: my
|
2581 |
split: test
|
|
|
2582 |
metrics:
|
2583 |
- type: accuracy
|
2584 |
value: 20.864156018829856
|
@@ -2591,6 +2730,7 @@ model-index:
|
|
2591 |
name: MTEB MassiveScenarioClassification (nb)
|
2592 |
config: nb
|
2593 |
split: test
|
|
|
2594 |
metrics:
|
2595 |
- type: accuracy
|
2596 |
value: 40.47074646940148
|
@@ -2603,6 +2743,7 @@ model-index:
|
|
2603 |
name: MTEB MassiveScenarioClassification (nl)
|
2604 |
config: nl
|
2605 |
split: test
|
|
|
2606 |
metrics:
|
2607 |
- type: accuracy
|
2608 |
value: 43.591123066577
|
@@ -2615,6 +2756,7 @@ model-index:
|
|
2615 |
name: MTEB MassiveScenarioClassification (pl)
|
2616 |
config: pl
|
2617 |
split: test
|
|
|
2618 |
metrics:
|
2619 |
- type: accuracy
|
2620 |
value: 41.876260928043045
|
@@ -2627,6 +2769,7 @@ model-index:
|
|
2627 |
name: MTEB MassiveScenarioClassification (pt)
|
2628 |
config: pt
|
2629 |
split: test
|
|
|
2630 |
metrics:
|
2631 |
- type: accuracy
|
2632 |
value: 46.30800268997983
|
@@ -2639,6 +2782,7 @@ model-index:
|
|
2639 |
name: MTEB MassiveScenarioClassification (ro)
|
2640 |
config: ro
|
2641 |
split: test
|
|
|
2642 |
metrics:
|
2643 |
- type: accuracy
|
2644 |
value: 42.525218560860786
|
@@ -2651,6 +2795,7 @@ model-index:
|
|
2651 |
name: MTEB MassiveScenarioClassification (ru)
|
2652 |
config: ru
|
2653 |
split: test
|
|
|
2654 |
metrics:
|
2655 |
- type: accuracy
|
2656 |
value: 35.94821788836584
|
@@ -2663,6 +2808,7 @@ model-index:
|
|
2663 |
name: MTEB MassiveScenarioClassification (sl)
|
2664 |
config: sl
|
2665 |
split: test
|
|
|
2666 |
metrics:
|
2667 |
- type: accuracy
|
2668 |
value: 38.69199731002017
|
@@ -2675,6 +2821,7 @@ model-index:
|
|
2675 |
name: MTEB MassiveScenarioClassification (sq)
|
2676 |
config: sq
|
2677 |
split: test
|
|
|
2678 |
metrics:
|
2679 |
- type: accuracy
|
2680 |
value: 40.474108944182916
|
@@ -2687,6 +2834,7 @@ model-index:
|
|
2687 |
name: MTEB MassiveScenarioClassification (sv)
|
2688 |
config: sv
|
2689 |
split: test
|
|
|
2690 |
metrics:
|
2691 |
- type: accuracy
|
2692 |
value: 41.523201075991935
|
@@ -2699,6 +2847,7 @@ model-index:
|
|
2699 |
name: MTEB MassiveScenarioClassification (sw)
|
2700 |
config: sw
|
2701 |
split: test
|
|
|
2702 |
metrics:
|
2703 |
- type: accuracy
|
2704 |
value: 39.54942837928716
|
@@ -2711,6 +2860,7 @@ model-index:
|
|
2711 |
name: MTEB MassiveScenarioClassification (ta)
|
2712 |
config: ta
|
2713 |
split: test
|
|
|
2714 |
metrics:
|
2715 |
- type: accuracy
|
2716 |
value: 22.8782784129119
|
@@ -2723,6 +2873,7 @@ model-index:
|
|
2723 |
name: MTEB MassiveScenarioClassification (te)
|
2724 |
config: te
|
2725 |
split: test
|
|
|
2726 |
metrics:
|
2727 |
- type: accuracy
|
2728 |
value: 20.51445864156019
|
@@ -2735,6 +2886,7 @@ model-index:
|
|
2735 |
name: MTEB MassiveScenarioClassification (th)
|
2736 |
config: th
|
2737 |
split: test
|
|
|
2738 |
metrics:
|
2739 |
- type: accuracy
|
2740 |
value: 34.92602555480834
|
@@ -2747,6 +2899,7 @@ model-index:
|
|
2747 |
name: MTEB MassiveScenarioClassification (tl)
|
2748 |
config: tl
|
2749 |
split: test
|
|
|
2750 |
metrics:
|
2751 |
- type: accuracy
|
2752 |
value: 40.74983187626093
|
@@ -2759,6 +2912,7 @@ model-index:
|
|
2759 |
name: MTEB MassiveScenarioClassification (tr)
|
2760 |
config: tr
|
2761 |
split: test
|
|
|
2762 |
metrics:
|
2763 |
- type: accuracy
|
2764 |
value: 39.06859448554136
|
@@ -2771,6 +2925,7 @@ model-index:
|
|
2771 |
name: MTEB MassiveScenarioClassification (ur)
|
2772 |
config: ur
|
2773 |
split: test
|
|
|
2774 |
metrics:
|
2775 |
- type: accuracy
|
2776 |
value: 29.747814391392062
|
@@ -2783,6 +2938,7 @@ model-index:
|
|
2783 |
name: MTEB MassiveScenarioClassification (vi)
|
2784 |
config: vi
|
2785 |
split: test
|
|
|
2786 |
metrics:
|
2787 |
- type: accuracy
|
2788 |
value: 38.02286482851379
|
@@ -2795,6 +2951,7 @@ model-index:
|
|
2795 |
name: MTEB MassiveScenarioClassification (zh-CN)
|
2796 |
config: zh-CN
|
2797 |
split: test
|
|
|
2798 |
metrics:
|
2799 |
- type: accuracy
|
2800 |
value: 48.550773369199725
|
@@ -2807,6 +2964,7 @@ model-index:
|
|
2807 |
name: MTEB MassiveScenarioClassification (zh-TW)
|
2808 |
config: zh-TW
|
2809 |
split: test
|
|
|
2810 |
metrics:
|
2811 |
- type: accuracy
|
2812 |
value: 45.17821116341628
|
@@ -2819,6 +2977,7 @@ model-index:
|
|
2819 |
name: MTEB MedrxivClusteringP2P
|
2820 |
config: default
|
2821 |
split: test
|
|
|
2822 |
metrics:
|
2823 |
- type: v_measure
|
2824 |
value: 28.301902023313875
|
@@ -2829,6 +2988,7 @@ model-index:
|
|
2829 |
name: MTEB MedrxivClusteringS2S
|
2830 |
config: default
|
2831 |
split: test
|
|
|
2832 |
metrics:
|
2833 |
- type: v_measure
|
2834 |
value: 24.932123582259287
|
@@ -2839,6 +2999,7 @@ model-index:
|
|
2839 |
name: MTEB MindSmallReranking
|
2840 |
config: default
|
2841 |
split: test
|
|
|
2842 |
metrics:
|
2843 |
- type: map
|
2844 |
value: 29.269341041468326
|
@@ -2851,6 +3012,7 @@ model-index:
|
|
2851 |
name: MTEB NFCorpus
|
2852 |
config: default
|
2853 |
split: test
|
|
|
2854 |
metrics:
|
2855 |
- type: map_at_1
|
2856 |
value: 1.2269999999999999
|
@@ -2907,6 +3069,7 @@ model-index:
|
|
2907 |
name: MTEB NQ
|
2908 |
config: default
|
2909 |
split: test
|
|
|
2910 |
metrics:
|
2911 |
- type: map_at_1
|
2912 |
value: 3.515
|
@@ -2963,6 +3126,7 @@ model-index:
|
|
2963 |
name: MTEB QuoraRetrieval
|
2964 |
config: default
|
2965 |
split: test
|
|
|
2966 |
metrics:
|
2967 |
- type: map_at_1
|
2968 |
value: 61.697
|
@@ -3019,6 +3183,7 @@ model-index:
|
|
3019 |
name: MTEB RedditClustering
|
3020 |
config: default
|
3021 |
split: test
|
|
|
3022 |
metrics:
|
3023 |
- type: v_measure
|
3024 |
value: 33.75741018380938
|
@@ -3029,6 +3194,7 @@ model-index:
|
|
3029 |
name: MTEB RedditClusteringP2P
|
3030 |
config: default
|
3031 |
split: test
|
|
|
3032 |
metrics:
|
3033 |
- type: v_measure
|
3034 |
value: 41.00799910099266
|
@@ -3039,6 +3205,7 @@ model-index:
|
|
3039 |
name: MTEB SCIDOCS
|
3040 |
config: default
|
3041 |
split: test
|
|
|
3042 |
metrics:
|
3043 |
- type: map_at_1
|
3044 |
value: 1.72
|
@@ -3095,6 +3262,7 @@ model-index:
|
|
3095 |
name: MTEB SICK-R
|
3096 |
config: default
|
3097 |
split: test
|
|
|
3098 |
metrics:
|
3099 |
- type: cos_sim_pearson
|
3100 |
value: 80.96286245858941
|
@@ -3115,6 +3283,7 @@ model-index:
|
|
3115 |
name: MTEB STS12
|
3116 |
config: default
|
3117 |
split: test
|
|
|
3118 |
metrics:
|
3119 |
- type: cos_sim_pearson
|
3120 |
value: 80.20938796088339
|
@@ -3135,6 +3304,7 @@ model-index:
|
|
3135 |
name: MTEB STS13
|
3136 |
config: default
|
3137 |
split: test
|
|
|
3138 |
metrics:
|
3139 |
- type: cos_sim_pearson
|
3140 |
value: 76.401935081936
|
@@ -3155,6 +3325,7 @@ model-index:
|
|
3155 |
name: MTEB STS14
|
3156 |
config: default
|
3157 |
split: test
|
|
|
3158 |
metrics:
|
3159 |
- type: cos_sim_pearson
|
3160 |
value: 75.35551963935667
|
@@ -3175,6 +3346,7 @@ model-index:
|
|
3175 |
name: MTEB STS15
|
3176 |
config: default
|
3177 |
split: test
|
|
|
3178 |
metrics:
|
3179 |
- type: cos_sim_pearson
|
3180 |
value: 79.05293131911803
|
@@ -3195,6 +3367,7 @@ model-index:
|
|
3195 |
name: MTEB STS16
|
3196 |
config: default
|
3197 |
split: test
|
|
|
3198 |
metrics:
|
3199 |
- type: cos_sim_pearson
|
3200 |
value: 76.04750373932828
|
@@ -3215,6 +3388,7 @@ model-index:
|
|
3215 |
name: MTEB STS17 (ko-ko)
|
3216 |
config: ko-ko
|
3217 |
split: test
|
|
|
3218 |
metrics:
|
3219 |
- type: cos_sim_pearson
|
3220 |
value: 43.0464619152799
|
@@ -3235,6 +3409,7 @@ model-index:
|
|
3235 |
name: MTEB STS17 (ar-ar)
|
3236 |
config: ar-ar
|
3237 |
split: test
|
|
|
3238 |
metrics:
|
3239 |
- type: cos_sim_pearson
|
3240 |
value: 53.27469278912148
|
@@ -3255,6 +3430,7 @@ model-index:
|
|
3255 |
name: MTEB STS17 (en-ar)
|
3256 |
config: en-ar
|
3257 |
split: test
|
|
|
3258 |
metrics:
|
3259 |
- type: cos_sim_pearson
|
3260 |
value: 1.5482997790039945
|
@@ -3275,6 +3451,7 @@ model-index:
|
|
3275 |
name: MTEB STS17 (en-de)
|
3276 |
config: en-de
|
3277 |
split: test
|
|
|
3278 |
metrics:
|
3279 |
- type: cos_sim_pearson
|
3280 |
value: 27.5420218362265
|
@@ -3295,6 +3472,7 @@ model-index:
|
|
3295 |
name: MTEB STS17 (en-en)
|
3296 |
config: en-en
|
3297 |
split: test
|
|
|
3298 |
metrics:
|
3299 |
- type: cos_sim_pearson
|
3300 |
value: 85.32029757646663
|
@@ -3315,6 +3493,7 @@ model-index:
|
|
3315 |
name: MTEB STS17 (en-tr)
|
3316 |
config: en-tr
|
3317 |
split: test
|
|
|
3318 |
metrics:
|
3319 |
- type: cos_sim_pearson
|
3320 |
value: 4.37162299241808
|
@@ -3335,6 +3514,7 @@ model-index:
|
|
3335 |
name: MTEB STS17 (es-en)
|
3336 |
config: es-en
|
3337 |
split: test
|
|
|
3338 |
metrics:
|
3339 |
- type: cos_sim_pearson
|
3340 |
value: 20.306030448858603
|
@@ -3355,6 +3535,7 @@ model-index:
|
|
3355 |
name: MTEB STS17 (es-es)
|
3356 |
config: es-es
|
3357 |
split: test
|
|
|
3358 |
metrics:
|
3359 |
- type: cos_sim_pearson
|
3360 |
value: 66.81873207478459
|
@@ -3375,6 +3556,7 @@ model-index:
|
|
3375 |
name: MTEB STS17 (fr-en)
|
3376 |
config: fr-en
|
3377 |
split: test
|
|
|
3378 |
metrics:
|
3379 |
- type: cos_sim_pearson
|
3380 |
value: 21.366487281202602
|
@@ -3395,6 +3577,7 @@ model-index:
|
|
3395 |
name: MTEB STS17 (it-en)
|
3396 |
config: it-en
|
3397 |
split: test
|
|
|
3398 |
metrics:
|
3399 |
- type: cos_sim_pearson
|
3400 |
value: 20.73153177251085
|
@@ -3415,6 +3598,7 @@ model-index:
|
|
3415 |
name: MTEB STS17 (nl-en)
|
3416 |
config: nl-en
|
3417 |
split: test
|
|
|
3418 |
metrics:
|
3419 |
- type: cos_sim_pearson
|
3420 |
value: 26.618435024084253
|
@@ -3435,6 +3619,7 @@ model-index:
|
|
3435 |
name: MTEB STS22 (en)
|
3436 |
config: en
|
3437 |
split: test
|
|
|
3438 |
metrics:
|
3439 |
- type: cos_sim_pearson
|
3440 |
value: 59.17638344661753
|
@@ -3455,6 +3640,7 @@ model-index:
|
|
3455 |
name: MTEB STS22 (de)
|
3456 |
config: de
|
3457 |
split: test
|
|
|
3458 |
metrics:
|
3459 |
- type: cos_sim_pearson
|
3460 |
value: 10.322254716987457
|
@@ -3475,6 +3661,7 @@ model-index:
|
|
3475 |
name: MTEB STS22 (es)
|
3476 |
config: es
|
3477 |
split: test
|
|
|
3478 |
metrics:
|
3479 |
- type: cos_sim_pearson
|
3480 |
value: 43.38031880545056
|
@@ -3495,6 +3682,7 @@ model-index:
|
|
3495 |
name: MTEB STS22 (pl)
|
3496 |
config: pl
|
3497 |
split: test
|
|
|
3498 |
metrics:
|
3499 |
- type: cos_sim_pearson
|
3500 |
value: 4.291290504363136
|
@@ -3515,6 +3703,7 @@ model-index:
|
|
3515 |
name: MTEB STS22 (tr)
|
3516 |
config: tr
|
3517 |
split: test
|
|
|
3518 |
metrics:
|
3519 |
- type: cos_sim_pearson
|
3520 |
value: 4.102739498555817
|
@@ -3535,6 +3724,7 @@ model-index:
|
|
3535 |
name: MTEB STS22 (ar)
|
3536 |
config: ar
|
3537 |
split: test
|
|
|
3538 |
metrics:
|
3539 |
- type: cos_sim_pearson
|
3540 |
value: 2.38765395226737
|
@@ -3555,6 +3745,7 @@ model-index:
|
|
3555 |
name: MTEB STS22 (ru)
|
3556 |
config: ru
|
3557 |
split: test
|
|
|
3558 |
metrics:
|
3559 |
- type: cos_sim_pearson
|
3560 |
value: 7.6735490672676345
|
@@ -3575,6 +3766,7 @@ model-index:
|
|
3575 |
name: MTEB STS22 (zh)
|
3576 |
config: zh
|
3577 |
split: test
|
|
|
3578 |
metrics:
|
3579 |
- type: cos_sim_pearson
|
3580 |
value: 0.06167614416104335
|
@@ -3595,6 +3787,7 @@ model-index:
|
|
3595 |
name: MTEB STS22 (fr)
|
3596 |
config: fr
|
3597 |
split: test
|
|
|
3598 |
metrics:
|
3599 |
- type: cos_sim_pearson
|
3600 |
value: 53.19490347682836
|
@@ -3615,6 +3808,7 @@ model-index:
|
|
3615 |
name: MTEB STS22 (de-en)
|
3616 |
config: de-en
|
3617 |
split: test
|
|
|
3618 |
metrics:
|
3619 |
- type: cos_sim_pearson
|
3620 |
value: 51.151158530122146
|
@@ -3635,6 +3829,7 @@ model-index:
|
|
3635 |
name: MTEB STS22 (es-en)
|
3636 |
config: es-en
|
3637 |
split: test
|
|
|
3638 |
metrics:
|
3639 |
- type: cos_sim_pearson
|
3640 |
value: 30.36194885126792
|
@@ -3655,6 +3850,7 @@ model-index:
|
|
3655 |
name: MTEB STS22 (it)
|
3656 |
config: it
|
3657 |
split: test
|
|
|
3658 |
metrics:
|
3659 |
- type: cos_sim_pearson
|
3660 |
value: 35.23883630335275
|
@@ -3675,6 +3871,7 @@ model-index:
|
|
3675 |
name: MTEB STS22 (pl-en)
|
3676 |
config: pl-en
|
3677 |
split: test
|
|
|
3678 |
metrics:
|
3679 |
- type: cos_sim_pearson
|
3680 |
value: 19.809302548119547
|
@@ -3695,6 +3892,7 @@ model-index:
|
|
3695 |
name: MTEB STS22 (zh-en)
|
3696 |
config: zh-en
|
3697 |
split: test
|
|
|
3698 |
metrics:
|
3699 |
- type: cos_sim_pearson
|
3700 |
value: 20.393500955410488
|
@@ -3715,6 +3913,7 @@ model-index:
|
|
3715 |
name: MTEB STS22 (es-it)
|
3716 |
config: es-it
|
3717 |
split: test
|
|
|
3718 |
metrics:
|
3719 |
- type: cos_sim_pearson
|
3720 |
value: 36.58919983075148
|
@@ -3735,6 +3934,7 @@ model-index:
|
|
3735 |
name: MTEB STS22 (de-fr)
|
3736 |
config: de-fr
|
3737 |
split: test
|
|
|
3738 |
metrics:
|
3739 |
- type: cos_sim_pearson
|
3740 |
value: 26.350936227950083
|
@@ -3755,6 +3955,7 @@ model-index:
|
|
3755 |
name: MTEB STS22 (de-pl)
|
3756 |
config: de-pl
|
3757 |
split: test
|
|
|
3758 |
metrics:
|
3759 |
- type: cos_sim_pearson
|
3760 |
value: 20.056269198600322
|
@@ -3775,6 +3976,7 @@ model-index:
|
|
3775 |
name: MTEB STS22 (fr-pl)
|
3776 |
config: fr-pl
|
3777 |
split: test
|
|
|
3778 |
metrics:
|
3779 |
- type: cos_sim_pearson
|
3780 |
value: 19.563740271419395
|
@@ -3795,6 +3997,7 @@ model-index:
|
|
3795 |
name: MTEB STSBenchmark
|
3796 |
config: default
|
3797 |
split: test
|
|
|
3798 |
metrics:
|
3799 |
- type: cos_sim_pearson
|
3800 |
value: 80.00905671833966
|
@@ -3815,6 +4018,7 @@ model-index:
|
|
3815 |
name: MTEB SciDocsRR
|
3816 |
config: default
|
3817 |
split: test
|
|
|
3818 |
metrics:
|
3819 |
- type: map
|
3820 |
value: 68.35710819755543
|
@@ -3827,6 +4031,7 @@ model-index:
|
|
3827 |
name: MTEB SciFact
|
3828 |
config: default
|
3829 |
split: test
|
|
|
3830 |
metrics:
|
3831 |
- type: map_at_1
|
3832 |
value: 21.556
|
@@ -3883,6 +4088,7 @@ model-index:
|
|
3883 |
name: MTEB SprintDuplicateQuestions
|
3884 |
config: default
|
3885 |
split: test
|
|
|
3886 |
metrics:
|
3887 |
- type: cos_sim_accuracy
|
3888 |
value: 99.49306930693069
|
@@ -3937,6 +4143,7 @@ model-index:
|
|
3937 |
name: MTEB StackExchangeClustering
|
3938 |
config: default
|
3939 |
split: test
|
|
|
3940 |
metrics:
|
3941 |
- type: v_measure
|
3942 |
value: 44.59127540530939
|
@@ -3947,6 +4154,7 @@ model-index:
|
|
3947 |
name: MTEB StackExchangeClusteringP2P
|
3948 |
config: default
|
3949 |
split: test
|
|
|
3950 |
metrics:
|
3951 |
- type: v_measure
|
3952 |
value: 28.230204578753636
|
@@ -3957,6 +4165,7 @@ model-index:
|
|
3957 |
name: MTEB StackOverflowDupQuestions
|
3958 |
config: default
|
3959 |
split: test
|
|
|
3960 |
metrics:
|
3961 |
- type: map
|
3962 |
value: 39.96520488022785
|
@@ -3969,6 +4178,7 @@ model-index:
|
|
3969 |
name: MTEB SummEval
|
3970 |
config: default
|
3971 |
split: test
|
|
|
3972 |
metrics:
|
3973 |
- type: cos_sim_pearson
|
3974 |
value: 30.56303767714449
|
@@ -3985,6 +4195,7 @@ model-index:
|
|
3985 |
name: MTEB TRECCOVID
|
3986 |
config: default
|
3987 |
split: test
|
|
|
3988 |
metrics:
|
3989 |
- type: map_at_1
|
3990 |
value: 0.11299999999999999
|
@@ -4041,6 +4252,7 @@ model-index:
|
|
4041 |
name: MTEB Touche2020
|
4042 |
config: default
|
4043 |
split: test
|
|
|
4044 |
metrics:
|
4045 |
- type: map_at_1
|
4046 |
value: 0.645
|
@@ -4097,6 +4309,7 @@ model-index:
|
|
4097 |
name: MTEB ToxicConversationsClassification
|
4098 |
config: default
|
4099 |
split: test
|
|
|
4100 |
metrics:
|
4101 |
- type: accuracy
|
4102 |
value: 62.7862
|
@@ -4111,6 +4324,7 @@ model-index:
|
|
4111 |
name: MTEB TweetSentimentExtractionClassification
|
4112 |
config: default
|
4113 |
split: test
|
|
|
4114 |
metrics:
|
4115 |
- type: accuracy
|
4116 |
value: 54.821731748726656
|
@@ -4123,6 +4337,7 @@ model-index:
|
|
4123 |
name: MTEB TwentyNewsgroupsClustering
|
4124 |
config: default
|
4125 |
split: test
|
|
|
4126 |
metrics:
|
4127 |
- type: v_measure
|
4128 |
value: 28.24295128553035
|
@@ -4133,6 +4348,7 @@ model-index:
|
|
4133 |
name: MTEB TwitterSemEval2015
|
4134 |
config: default
|
4135 |
split: test
|
|
|
4136 |
metrics:
|
4137 |
- type: cos_sim_accuracy
|
4138 |
value: 81.5640460153782
|
@@ -4187,6 +4403,7 @@ model-index:
|
|
4187 |
name: MTEB TwitterURLCorpus
|
4188 |
config: default
|
4189 |
split: test
|
|
|
4190 |
metrics:
|
4191 |
- type: cos_sim_accuracy
|
4192 |
value: 86.63018589668955
|
|
|
15 |
name: MTEB AmazonCounterfactualClassification (en)
|
16 |
config: en
|
17 |
split: test
|
18 |
+
revision: 2d8a100785abf0ae21420d2a55b0c56e3e1ea996
|
19 |
metrics:
|
20 |
- type: accuracy
|
21 |
value: 65.88059701492537
|
|
|
30 |
name: MTEB AmazonCounterfactualClassification (de)
|
31 |
config: de
|
32 |
split: test
|
33 |
+
revision: 2d8a100785abf0ae21420d2a55b0c56e3e1ea996
|
34 |
metrics:
|
35 |
- type: accuracy
|
36 |
value: 59.07922912205568
|
|
|
45 |
name: MTEB AmazonCounterfactualClassification (en-ext)
|
46 |
config: en-ext
|
47 |
split: test
|
48 |
+
revision: 2d8a100785abf0ae21420d2a55b0c56e3e1ea996
|
49 |
metrics:
|
50 |
- type: accuracy
|
51 |
value: 64.91754122938531
|
|
|
60 |
name: MTEB AmazonCounterfactualClassification (ja)
|
61 |
config: ja
|
62 |
split: test
|
63 |
+
revision: 2d8a100785abf0ae21420d2a55b0c56e3e1ea996
|
64 |
metrics:
|
65 |
- type: accuracy
|
66 |
value: 56.423982869378996
|
|
|
75 |
name: MTEB AmazonPolarityClassification
|
76 |
config: default
|
77 |
split: test
|
78 |
+
revision: 80714f8dcf8cefc218ef4f8c5a966dd83f75a0e1
|
79 |
metrics:
|
80 |
- type: accuracy
|
81 |
value: 74.938225
|
|
|
90 |
name: MTEB AmazonReviewsClassification (en)
|
91 |
config: en
|
92 |
split: test
|
93 |
+
revision: c379a6705fec24a2493fa68e011692605f44e119
|
94 |
metrics:
|
95 |
- type: accuracy
|
96 |
value: 35.098
|
|
|
103 |
name: MTEB AmazonReviewsClassification (de)
|
104 |
config: de
|
105 |
split: test
|
106 |
+
revision: c379a6705fec24a2493fa68e011692605f44e119
|
107 |
metrics:
|
108 |
- type: accuracy
|
109 |
value: 24.516
|
|
|
116 |
name: MTEB AmazonReviewsClassification (es)
|
117 |
config: es
|
118 |
split: test
|
119 |
+
revision: c379a6705fec24a2493fa68e011692605f44e119
|
120 |
metrics:
|
121 |
- type: accuracy
|
122 |
value: 29.097999999999995
|
|
|
129 |
name: MTEB AmazonReviewsClassification (fr)
|
130 |
config: fr
|
131 |
split: test
|
132 |
+
revision: c379a6705fec24a2493fa68e011692605f44e119
|
133 |
metrics:
|
134 |
- type: accuracy
|
135 |
value: 27.395999999999997
|
|
|
142 |
name: MTEB AmazonReviewsClassification (ja)
|
143 |
config: ja
|
144 |
split: test
|
145 |
+
revision: c379a6705fec24a2493fa68e011692605f44e119
|
146 |
metrics:
|
147 |
- type: accuracy
|
148 |
value: 21.724
|
|
|
155 |
name: MTEB AmazonReviewsClassification (zh)
|
156 |
config: zh
|
157 |
split: test
|
158 |
+
revision: c379a6705fec24a2493fa68e011692605f44e119
|
159 |
metrics:
|
160 |
- type: accuracy
|
161 |
value: 23.976
|
|
|
168 |
name: MTEB ArguAna
|
169 |
config: default
|
170 |
split: test
|
171 |
+
revision: 5b3e3697907184a9b77a3c99ee9ea1a9cbb1e4e3
|
172 |
metrics:
|
173 |
- type: map_at_1
|
174 |
value: 13.442000000000002
|
|
|
225 |
name: MTEB ArxivClusteringP2P
|
226 |
config: default
|
227 |
split: test
|
228 |
+
revision: 0bbdb47bcbe3a90093699aefeed338a0f28a7ee8
|
229 |
metrics:
|
230 |
- type: v_measure
|
231 |
value: 34.742482477870766
|
|
|
236 |
name: MTEB ArxivClusteringS2S
|
237 |
config: default
|
238 |
split: test
|
239 |
+
revision: b73bd54100e5abfa6e3a23dcafb46fe4d2438dc3
|
240 |
metrics:
|
241 |
- type: v_measure
|
242 |
value: 24.67870651472156
|
|
|
247 |
name: MTEB AskUbuntuDupQuestions
|
248 |
config: default
|
249 |
split: test
|
250 |
+
revision: 4d853f94cd57d85ec13805aeeac3ae3e5eb4c49c
|
251 |
metrics:
|
252 |
- type: map
|
253 |
value: 52.63439984994702
|
|
|
260 |
name: MTEB BIOSSES
|
261 |
config: default
|
262 |
split: test
|
263 |
+
revision: 9ee918f184421b6bd48b78f6c714d86546106103
|
264 |
metrics:
|
265 |
- type: cos_sim_pearson
|
266 |
value: 72.78000135012542
|
|
|
281 |
name: MTEB BUCC (de-en)
|
282 |
config: de-en
|
283 |
split: test
|
284 |
+
revision: d51519689f32196a32af33b075a01d0e7c51e252
|
285 |
metrics:
|
286 |
- type: accuracy
|
287 |
value: 1.0960334029227559
|
|
|
298 |
name: MTEB BUCC (fr-en)
|
299 |
config: fr-en
|
300 |
split: test
|
301 |
+
revision: d51519689f32196a32af33b075a01d0e7c51e252
|
302 |
metrics:
|
303 |
- type: accuracy
|
304 |
value: 0.02201188641866608
|
|
|
315 |
name: MTEB BUCC (ru-en)
|
316 |
config: ru-en
|
317 |
split: test
|
318 |
+
revision: d51519689f32196a32af33b075a01d0e7c51e252
|
319 |
metrics:
|
320 |
- type: accuracy
|
321 |
value: 0.0
|
|
|
332 |
name: MTEB BUCC (zh-en)
|
333 |
config: zh-en
|
334 |
split: test
|
335 |
+
revision: d51519689f32196a32af33b075a01d0e7c51e252
|
336 |
metrics:
|
337 |
- type: accuracy
|
338 |
value: 0.0
|
|
|
349 |
name: MTEB Banking77Classification
|
350 |
config: default
|
351 |
split: test
|
352 |
+
revision: 44fa15921b4c889113cc5df03dd4901b49161ab7
|
353 |
metrics:
|
354 |
- type: accuracy
|
355 |
value: 74.67857142857142
|
|
|
362 |
name: MTEB BiorxivClusteringP2P
|
363 |
config: default
|
364 |
split: test
|
365 |
+
revision: 11d0121201d1f1f280e8cc8f3d98fb9c4d9f9c55
|
366 |
metrics:
|
367 |
- type: v_measure
|
368 |
value: 28.93427045246491
|
|
|
373 |
name: MTEB BiorxivClusteringS2S
|
374 |
config: default
|
375 |
split: test
|
376 |
+
revision: c0fab014e1bcb8d3a5e31b2088972a1e01547dc1
|
377 |
metrics:
|
378 |
- type: v_measure
|
379 |
value: 23.080939123955474
|
|
|
384 |
name: MTEB CQADupstackAndroidRetrieval
|
385 |
config: default
|
386 |
split: test
|
387 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
388 |
metrics:
|
389 |
- type: map_at_1
|
390 |
value: 18.221999999999998
|
|
|
441 |
name: MTEB CQADupstackEnglishRetrieval
|
442 |
config: default
|
443 |
split: test
|
444 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
445 |
metrics:
|
446 |
- type: map_at_1
|
447 |
value: 12.058
|
|
|
498 |
name: MTEB CQADupstackGamingRetrieval
|
499 |
config: default
|
500 |
split: test
|
501 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
502 |
metrics:
|
503 |
- type: map_at_1
|
504 |
value: 21.183
|
|
|
555 |
name: MTEB CQADupstackGisRetrieval
|
556 |
config: default
|
557 |
split: test
|
558 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
559 |
metrics:
|
560 |
- type: map_at_1
|
561 |
value: 11.350999999999999
|
|
|
612 |
name: MTEB CQADupstackMathematicaRetrieval
|
613 |
config: default
|
614 |
split: test
|
615 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
616 |
metrics:
|
617 |
- type: map_at_1
|
618 |
value: 8.08
|
|
|
669 |
name: MTEB CQADupstackPhysicsRetrieval
|
670 |
config: default
|
671 |
split: test
|
672 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
673 |
metrics:
|
674 |
- type: map_at_1
|
675 |
value: 13.908999999999999
|
|
|
726 |
name: MTEB CQADupstackProgrammersRetrieval
|
727 |
config: default
|
728 |
split: test
|
729 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
730 |
metrics:
|
731 |
- type: map_at_1
|
732 |
value: 12.598
|
|
|
783 |
name: MTEB CQADupstackRetrieval
|
784 |
config: default
|
785 |
split: test
|
786 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
787 |
metrics:
|
788 |
- type: map_at_1
|
789 |
value: 12.738416666666666
|
|
|
840 |
name: MTEB CQADupstackStatsRetrieval
|
841 |
config: default
|
842 |
split: test
|
843 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
844 |
metrics:
|
845 |
- type: map_at_1
|
846 |
value: 12.307
|
|
|
897 |
name: MTEB CQADupstackTexRetrieval
|
898 |
config: default
|
899 |
split: test
|
900 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
901 |
metrics:
|
902 |
- type: map_at_1
|
903 |
value: 6.496
|
|
|
954 |
name: MTEB CQADupstackUnixRetrieval
|
955 |
config: default
|
956 |
split: test
|
957 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
958 |
metrics:
|
959 |
- type: map_at_1
|
960 |
value: 13.843
|
|
|
1011 |
name: MTEB CQADupstackWebmastersRetrieval
|
1012 |
config: default
|
1013 |
split: test
|
1014 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
1015 |
metrics:
|
1016 |
- type: map_at_1
|
1017 |
value: 13.757
|
|
|
1068 |
name: MTEB CQADupstackWordpressRetrieval
|
1069 |
config: default
|
1070 |
split: test
|
1071 |
+
revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
|
1072 |
metrics:
|
1073 |
- type: map_at_1
|
1074 |
value: 9.057
|
|
|
1125 |
name: MTEB ClimateFEVER
|
1126 |
config: default
|
1127 |
split: test
|
1128 |
+
revision: 392b78eb68c07badcd7c2cd8f39af108375dfcce
|
1129 |
metrics:
|
1130 |
- type: map_at_1
|
1131 |
value: 3.714
|
|
|
1182 |
name: MTEB DBPedia
|
1183 |
config: default
|
1184 |
split: test
|
1185 |
+
revision: f097057d03ed98220bc7309ddb10b71a54d667d6
|
1186 |
metrics:
|
1187 |
- type: map_at_1
|
1188 |
value: 1.764
|
|
|
1239 |
name: MTEB EmotionClassification
|
1240 |
config: default
|
1241 |
split: test
|
1242 |
+
revision: 829147f8f75a25f005913200eb5ed41fae320aa1
|
1243 |
metrics:
|
1244 |
- type: accuracy
|
1245 |
value: 42.225
|
|
|
1252 |
name: MTEB FEVER
|
1253 |
config: default
|
1254 |
split: test
|
1255 |
+
revision: 1429cf27e393599b8b359b9b72c666f96b2525f9
|
1256 |
metrics:
|
1257 |
- type: map_at_1
|
1258 |
value: 11.497
|
|
|
1309 |
name: MTEB FiQA2018
|
1310 |
config: default
|
1311 |
split: test
|
1312 |
+
revision: 41b686a7f28c59bcaaa5791efd47c67c8ebe28be
|
1313 |
metrics:
|
1314 |
- type: map_at_1
|
1315 |
value: 3.637
|
|
|
1366 |
name: MTEB HotpotQA
|
1367 |
config: default
|
1368 |
split: test
|
1369 |
+
revision: 766870b35a1b9ca65e67a0d1913899973551fc6c
|
1370 |
metrics:
|
1371 |
- type: map_at_1
|
1372 |
value: 9.676
|
|
|
1423 |
name: MTEB ImdbClassification
|
1424 |
config: default
|
1425 |
split: test
|
1426 |
+
revision: 8d743909f834c38949e8323a8a6ce8721ea6c7f4
|
1427 |
metrics:
|
1428 |
- type: accuracy
|
1429 |
value: 62.895999999999994
|
|
|
1438 |
name: MTEB MSMARCO
|
1439 |
config: default
|
1440 |
split: validation
|
1441 |
+
revision: e6838a846e2408f22cf5cc337ebc83e0bcf77849
|
1442 |
metrics:
|
1443 |
- type: map_at_1
|
1444 |
value: 2.88
|
|
|
1495 |
name: MTEB MTOPDomainClassification (en)
|
1496 |
config: en
|
1497 |
split: test
|
1498 |
+
revision: a7e2a951126a26fc8c6a69f835f33a346ba259e3
|
1499 |
metrics:
|
1500 |
- type: accuracy
|
1501 |
value: 81.51846785225717
|
|
|
1508 |
name: MTEB MTOPDomainClassification (de)
|
1509 |
config: de
|
1510 |
split: test
|
1511 |
+
revision: a7e2a951126a26fc8c6a69f835f33a346ba259e3
|
1512 |
metrics:
|
1513 |
- type: accuracy
|
1514 |
value: 60.37475345167653
|
|
|
1521 |
name: MTEB MTOPDomainClassification (es)
|
1522 |
config: es
|
1523 |
split: test
|
1524 |
+
revision: a7e2a951126a26fc8c6a69f835f33a346ba259e3
|
1525 |
metrics:
|
1526 |
- type: accuracy
|
1527 |
value: 67.36824549699799
|
|
|
1534 |
name: MTEB MTOPDomainClassification (fr)
|
1535 |
config: fr
|
1536 |
split: test
|
1537 |
+
revision: a7e2a951126a26fc8c6a69f835f33a346ba259e3
|
1538 |
metrics:
|
1539 |
- type: accuracy
|
1540 |
value: 63.12871907297212
|
|
|
1547 |
name: MTEB MTOPDomainClassification (hi)
|
1548 |
config: hi
|
1549 |
split: test
|
1550 |
+
revision: a7e2a951126a26fc8c6a69f835f33a346ba259e3
|
1551 |
metrics:
|
1552 |
- type: accuracy
|
1553 |
value: 47.04553603442094
|
|
|
1560 |
name: MTEB MTOPDomainClassification (th)
|
1561 |
config: th
|
1562 |
split: test
|
1563 |
+
revision: a7e2a951126a26fc8c6a69f835f33a346ba259e3
|
1564 |
metrics:
|
1565 |
- type: accuracy
|
1566 |
value: 52.282097649186255
|
|
|
1573 |   name: MTEB MTOPIntentClassification (en)
1574 |   config: en
1575 |   split: test
1576 | + revision: 6299947a7777084cc2d4b64235bf7190381ce755
1577 |   metrics:
1578 |   - type: accuracy
1579 |   value: 58.2421340629275

1586 |   name: MTEB MTOPIntentClassification (de)
1587 |   config: de
1588 |   split: test
1589 | + revision: 6299947a7777084cc2d4b64235bf7190381ce755
1590 |   metrics:
1591 |   - type: accuracy
1592 |   value: 45.069033530571986

1599 |   name: MTEB MTOPIntentClassification (es)
1600 |   config: es
1601 |   split: test
1602 | + revision: 6299947a7777084cc2d4b64235bf7190381ce755
1603 |   metrics:
1604 |   - type: accuracy
1605 |   value: 48.80920613742495

1612 |   name: MTEB MTOPIntentClassification (fr)
1613 |   config: fr
1614 |   split: test
1615 | + revision: 6299947a7777084cc2d4b64235bf7190381ce755
1616 |   metrics:
1617 |   - type: accuracy
1618 |   value: 44.337613529595984

1625 |   name: MTEB MTOPIntentClassification (hi)
1626 |   config: hi
1627 |   split: test
1628 | + revision: 6299947a7777084cc2d4b64235bf7190381ce755
1629 |   metrics:
1630 |   - type: accuracy
1631 |   value: 34.198637504481894

1638 |   name: MTEB MTOPIntentClassification (th)
1639 |   config: th
1640 |   split: test
1641 | + revision: 6299947a7777084cc2d4b64235bf7190381ce755
1642 |   metrics:
1643 |   - type: accuracy
1644 |   value: 43.11030741410488
1651 |   name: MTEB MassiveIntentClassification (af)
1652 |   config: af
1653 |   split: test
1654 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1655 |   metrics:
1656 |   - type: accuracy
1657 |   value: 37.79421654337593

1664 |   name: MTEB MassiveIntentClassification (am)
1665 |   config: am
1666 |   split: test
1667 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1668 |   metrics:
1669 |   - type: accuracy
1670 |   value: 23.722259583053127

1677 |   name: MTEB MassiveIntentClassification (ar)
1678 |   config: ar
1679 |   split: test
1680 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1681 |   metrics:
1682 |   - type: accuracy
1683 |   value: 29.64021519838601

1690 |   name: MTEB MassiveIntentClassification (az)
1691 |   config: az
1692 |   split: test
1693 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1694 |   metrics:
1695 |   - type: accuracy
1696 |   value: 39.4754539340955

1703 |   name: MTEB MassiveIntentClassification (bn)
1704 |   config: bn
1705 |   split: test
1706 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1707 |   metrics:
1708 |   - type: accuracy
1709 |   value: 26.550100874243444

1716 |   name: MTEB MassiveIntentClassification (cy)
1717 |   config: cy
1718 |   split: test
1719 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1720 |   metrics:
1721 |   - type: accuracy
1722 |   value: 38.78278412911904

1729 |   name: MTEB MassiveIntentClassification (da)
1730 |   config: da
1731 |   split: test
1732 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1733 |   metrics:
1734 |   - type: accuracy
1735 |   value: 43.557498318762605

1742 |   name: MTEB MassiveIntentClassification (de)
1743 |   config: de
1744 |   split: test
1745 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1746 |   metrics:
1747 |   - type: accuracy
1748 |   value: 40.39340954942838

1755 |   name: MTEB MassiveIntentClassification (el)
1756 |   config: el
1757 |   split: test
1758 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1759 |   metrics:
1760 |   - type: accuracy
1761 |   value: 37.28648285137861

1768 |   name: MTEB MassiveIntentClassification (en)
1769 |   config: en
1770 |   split: test
1771 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1772 |   metrics:
1773 |   - type: accuracy
1774 |   value: 58.080026899798256

1781 |   name: MTEB MassiveIntentClassification (es)
1782 |   config: es
1783 |   split: test
1784 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1785 |   metrics:
1786 |   - type: accuracy
1787 |   value: 41.176866173503704

1794 |   name: MTEB MassiveIntentClassification (fa)
1795 |   config: fa
1796 |   split: test
1797 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1798 |   metrics:
1799 |   - type: accuracy
1800 |   value: 36.422326832548755

1807 |   name: MTEB MassiveIntentClassification (fi)
1808 |   config: fi
1809 |   split: test
1810 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1811 |   metrics:
1812 |   - type: accuracy
1813 |   value: 38.75588433086752

1820 |   name: MTEB MassiveIntentClassification (fr)
1821 |   config: fr
1822 |   split: test
1823 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1824 |   metrics:
1825 |   - type: accuracy
1826 |   value: 43.67182246133153

1833 |   name: MTEB MassiveIntentClassification (he)
1834 |   config: he
1835 |   split: test
1836 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1837 |   metrics:
1838 |   - type: accuracy
1839 |   value: 31.980497646267658

1846 |   name: MTEB MassiveIntentClassification (hi)
1847 |   config: hi
1848 |   split: test
1849 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1850 |   metrics:
1851 |   - type: accuracy
1852 |   value: 28.039677202420982

1859 |   name: MTEB MassiveIntentClassification (hu)
1860 |   config: hu
1861 |   split: test
1862 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1863 |   metrics:
1864 |   - type: accuracy
1865 |   value: 38.13718897108272

1872 |   name: MTEB MassiveIntentClassification (hy)
1873 |   config: hy
1874 |   split: test
1875 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1876 |   metrics:
1877 |   - type: accuracy
1878 |   value: 26.05245460659045

1885 |   name: MTEB MassiveIntentClassification (id)
1886 |   config: id
1887 |   split: test
1888 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1889 |   metrics:
1890 |   - type: accuracy
1891 |   value: 41.156691324815064

1898 |   name: MTEB MassiveIntentClassification (is)
1899 |   config: is
1900 |   split: test
1901 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1902 |   metrics:
1903 |   - type: accuracy
1904 |   value: 38.62811028917284

1911 |   name: MTEB MassiveIntentClassification (it)
1912 |   config: it
1913 |   split: test
1914 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1915 |   metrics:
1916 |   - type: accuracy
1917 |   value: 44.0383322125084

1924 |   name: MTEB MassiveIntentClassification (ja)
1925 |   config: ja
1926 |   split: test
1927 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1928 |   metrics:
1929 |   - type: accuracy
1930 |   value: 46.20712844653666

1937 |   name: MTEB MassiveIntentClassification (jv)
1938 |   config: jv
1939 |   split: test
1940 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1941 |   metrics:
1942 |   - type: accuracy
1943 |   value: 37.60591795561533

1950 |   name: MTEB MassiveIntentClassification (ka)
1951 |   config: ka
1952 |   split: test
1953 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1954 |   metrics:
1955 |   - type: accuracy
1956 |   value: 24.47209145931405

1963 |   name: MTEB MassiveIntentClassification (km)
1964 |   config: km
1965 |   split: test
1966 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1967 |   metrics:
1968 |   - type: accuracy
1969 |   value: 26.23739071956961

1976 |   name: MTEB MassiveIntentClassification (kn)
1977 |   config: kn
1978 |   split: test
1979 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1980 |   metrics:
1981 |   - type: accuracy
1982 |   value: 17.831203765971754

1989 |   name: MTEB MassiveIntentClassification (ko)
1990 |   config: ko
1991 |   split: test
1992 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1993 |   metrics:
1994 |   - type: accuracy
1995 |   value: 37.266308002689975
2002 |   name: MTEB MassiveIntentClassification (lv)
2003 |   config: lv
2004 |   split: test
2005 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2006 |   metrics:
2007 |   - type: accuracy
2008 |   value: 40.93140551445864

2015 |   name: MTEB MassiveIntentClassification (ml)
2016 |   config: ml
2017 |   split: test
2018 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2019 |   metrics:
2020 |   - type: accuracy
2021 |   value: 17.88500336247478

2028 |   name: MTEB MassiveIntentClassification (mn)
2029 |   config: mn
2030 |   split: test
2031 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2032 |   metrics:
2033 |   - type: accuracy
2034 |   value: 32.975790181573636

2041 |   name: MTEB MassiveIntentClassification (ms)
2042 |   config: ms
2043 |   split: test
2044 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2045 |   metrics:
2046 |   - type: accuracy
2047 |   value: 40.91123066577001

2054 |   name: MTEB MassiveIntentClassification (my)
2055 |   config: my
2056 |   split: test
2057 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2058 |   metrics:
2059 |   - type: accuracy
2060 |   value: 17.834566240753194

2067 |   name: MTEB MassiveIntentClassification (nb)
2068 |   config: nb
2069 |   split: test
2070 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2071 |   metrics:
2072 |   - type: accuracy
2073 |   value: 39.47881640887693

2080 |   name: MTEB MassiveIntentClassification (nl)
2081 |   config: nl
2082 |   split: test
2083 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2084 |   metrics:
2085 |   - type: accuracy
2086 |   value: 41.76193678547412

2093 |   name: MTEB MassiveIntentClassification (pl)
2094 |   config: pl
2095 |   split: test
2096 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2097 |   metrics:
2098 |   - type: accuracy
2099 |   value: 42.61936785474109

2106 |   name: MTEB MassiveIntentClassification (pt)
2107 |   config: pt
2108 |   split: test
2109 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2110 |   metrics:
2111 |   - type: accuracy
2112 |   value: 44.54270342972427

2119 |   name: MTEB MassiveIntentClassification (ro)
2120 |   config: ro
2121 |   split: test
2122 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2123 |   metrics:
2124 |   - type: accuracy
2125 |   value: 39.96973772696705

2132 |   name: MTEB MassiveIntentClassification (ru)
2133 |   config: ru
2134 |   split: test
2135 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2136 |   metrics:
2137 |   - type: accuracy
2138 |   value: 37.461331540013454

2145 |   name: MTEB MassiveIntentClassification (sl)
2146 |   config: sl
2147 |   split: test
2148 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2149 |   metrics:
2150 |   - type: accuracy
2151 |   value: 38.28850033624748

2158 |   name: MTEB MassiveIntentClassification (sq)
2159 |   config: sq
2160 |   split: test
2161 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2162 |   metrics:
2163 |   - type: accuracy
2164 |   value: 40.95494283792872

2171 |   name: MTEB MassiveIntentClassification (sv)
2172 |   config: sv
2173 |   split: test
2174 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2175 |   metrics:
2176 |   - type: accuracy
2177 |   value: 41.85272360457296

2184 |   name: MTEB MassiveIntentClassification (sw)
2185 |   config: sw
2186 |   split: test
2187 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2188 |   metrics:
2189 |   - type: accuracy
2190 |   value: 38.328850033624754

2197 |   name: MTEB MassiveIntentClassification (ta)
2198 |   config: ta
2199 |   split: test
2200 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2201 |   metrics:
2202 |   - type: accuracy
2203 |   value: 19.031607262945528

2210 |   name: MTEB MassiveIntentClassification (te)
2211 |   config: te
2212 |   split: test
2213 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2214 |   metrics:
2215 |   - type: accuracy
2216 |   value: 19.38466711499664

2223 |   name: MTEB MassiveIntentClassification (th)
2224 |   config: th
2225 |   split: test
2226 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2227 |   metrics:
2228 |   - type: accuracy
2229 |   value: 34.088769334229994

2236 |   name: MTEB MassiveIntentClassification (tl)
2237 |   config: tl
2238 |   split: test
2239 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2240 |   metrics:
2241 |   - type: accuracy
2242 |   value: 40.285810356422324

2249 |   name: MTEB MassiveIntentClassification (tr)
2250 |   config: tr
2251 |   split: test
2252 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2253 |   metrics:
2254 |   - type: accuracy
2255 |   value: 38.860121049092136

2262 |   name: MTEB MassiveIntentClassification (ur)
2263 |   config: ur
2264 |   split: test
2265 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2266 |   metrics:
2267 |   - type: accuracy
2268 |   value: 27.834566240753194

2275 |   name: MTEB MassiveIntentClassification (vi)
2276 |   config: vi
2277 |   split: test
2278 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2279 |   metrics:
2280 |   - type: accuracy
2281 |   value: 38.70544720914593

2288 |   name: MTEB MassiveIntentClassification (zh-CN)
2289 |   config: zh-CN
2290 |   split: test
2291 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2292 |   metrics:
2293 |   - type: accuracy
2294 |   value: 45.78009414929387

2301 |   name: MTEB MassiveIntentClassification (zh-TW)
2302 |   config: zh-TW
2303 |   split: test
2304 | + revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
2305 |   metrics:
2306 |   - type: accuracy
2307 |   value: 42.32010759919301
2314 |   name: MTEB MassiveScenarioClassification (af)
2315 |   config: af
2316 |   split: test
2317 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2318 |   metrics:
2319 |   - type: accuracy
2320 |   value: 40.24546065904506

2327 |   name: MTEB MassiveScenarioClassification (am)
2328 |   config: am
2329 |   split: test
2330 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2331 |   metrics:
2332 |   - type: accuracy
2333 |   value: 25.68930733019502

2340 |   name: MTEB MassiveScenarioClassification (ar)
2341 |   config: ar
2342 |   split: test
2343 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2344 |   metrics:
2345 |   - type: accuracy
2346 |   value: 32.39744451916611

2353 |   name: MTEB MassiveScenarioClassification (az)
2354 |   config: az
2355 |   split: test
2356 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2357 |   metrics:
2358 |   - type: accuracy
2359 |   value: 40.53127101546738

2366 |   name: MTEB MassiveScenarioClassification (bn)
2367 |   config: bn
2368 |   split: test
2369 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2370 |   metrics:
2371 |   - type: accuracy
2372 |   value: 27.23268325487559

2379 |   name: MTEB MassiveScenarioClassification (cy)
2380 |   config: cy
2381 |   split: test
2382 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2383 |   metrics:
2384 |   - type: accuracy
2385 |   value: 38.69872225958305

2392 |   name: MTEB MassiveScenarioClassification (da)
2393 |   config: da
2394 |   split: test
2395 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2396 |   metrics:
2397 |   - type: accuracy
2398 |   value: 44.75453934095494

2405 |   name: MTEB MassiveScenarioClassification (de)
2406 |   config: de
2407 |   split: test
2408 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2409 |   metrics:
2410 |   - type: accuracy
2411 |   value: 41.355077336919976

2418 |   name: MTEB MassiveScenarioClassification (el)
2419 |   config: el
2420 |   split: test
2421 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2422 |   metrics:
2423 |   - type: accuracy
2424 |   value: 38.43981170141224

2431 |   name: MTEB MassiveScenarioClassification (en)
2432 |   config: en
2433 |   split: test
2434 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2435 |   metrics:
2436 |   - type: accuracy
2437 |   value: 66.33826496301278

2444 |   name: MTEB MassiveScenarioClassification (es)
2445 |   config: es
2446 |   split: test
2447 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2448 |   metrics:
2449 |   - type: accuracy
2450 |   value: 44.17955615332885

2457 |   name: MTEB MassiveScenarioClassification (fa)
2458 |   config: fa
2459 |   split: test
2460 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2461 |   metrics:
2462 |   - type: accuracy
2463 |   value: 34.82851378614661

2470 |   name: MTEB MassiveScenarioClassification (fi)
2471 |   config: fi
2472 |   split: test
2473 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2474 |   metrics:
2475 |   - type: accuracy
2476 |   value: 40.561533288500335

2483 |   name: MTEB MassiveScenarioClassification (fr)
2484 |   config: fr
2485 |   split: test
2486 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2487 |   metrics:
2488 |   - type: accuracy
2489 |   value: 45.917955615332886

2496 |   name: MTEB MassiveScenarioClassification (he)
2497 |   config: he
2498 |   split: test
2499 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2500 |   metrics:
2501 |   - type: accuracy
2502 |   value: 32.08473436449227

2509 |   name: MTEB MassiveScenarioClassification (hi)
2510 |   config: hi
2511 |   split: test
2512 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2513 |   metrics:
2514 |   - type: accuracy
2515 |   value: 28.369199731002016

2522 |   name: MTEB MassiveScenarioClassification (hu)
2523 |   config: hu
2524 |   split: test
2525 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2526 |   metrics:
2527 |   - type: accuracy
2528 |   value: 39.49226630800269

2535 |   name: MTEB MassiveScenarioClassification (hy)
2536 |   config: hy
2537 |   split: test
2538 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2539 |   metrics:
2540 |   - type: accuracy
2541 |   value: 25.904505716207133

2548 |   name: MTEB MassiveScenarioClassification (id)
2549 |   config: id
2550 |   split: test
2551 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2552 |   metrics:
2553 |   - type: accuracy
2554 |   value: 40.95830531271016

2561 |   name: MTEB MassiveScenarioClassification (is)
2562 |   config: is
2563 |   split: test
2564 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2565 |   metrics:
2566 |   - type: accuracy
2567 |   value: 38.564223268325485

2574 |   name: MTEB MassiveScenarioClassification (it)
2575 |   config: it
2576 |   split: test
2577 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2578 |   metrics:
2579 |   - type: accuracy
2580 |   value: 46.58708809683928

2587 |   name: MTEB MassiveScenarioClassification (ja)
2588 |   config: ja
2589 |   split: test
2590 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2591 |   metrics:
2592 |   - type: accuracy
2593 |   value: 46.24747814391393

2600 |   name: MTEB MassiveScenarioClassification (jv)
2601 |   config: jv
2602 |   split: test
2603 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2604 |   metrics:
2605 |   - type: accuracy
2606 |   value: 39.6570275722932

2613 |   name: MTEB MassiveScenarioClassification (ka)
2614 |   config: ka
2615 |   split: test
2616 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2617 |   metrics:
2618 |   - type: accuracy
2619 |   value: 25.279085406859448

2626 |   name: MTEB MassiveScenarioClassification (km)
2627 |   config: km
2628 |   split: test
2629 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2630 |   metrics:
2631 |   - type: accuracy
2632 |   value: 28.97108271687962

2639 |   name: MTEB MassiveScenarioClassification (kn)
2640 |   config: kn
2641 |   split: test
2642 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2643 |   metrics:
2644 |   - type: accuracy
2645 |   value: 19.27370544720915

2652 |   name: MTEB MassiveScenarioClassification (ko)
2653 |   config: ko
2654 |   split: test
2655 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2656 |   metrics:
2657 |   - type: accuracy
2658 |   value: 35.729657027572294
2665 |   name: MTEB MassiveScenarioClassification (lv)
2666 |   config: lv
2667 |   split: test
2668 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2669 |   metrics:
2670 |   - type: accuracy
2671 |   value: 39.57296570275723

2678 |   name: MTEB MassiveScenarioClassification (ml)
2679 |   config: ml
2680 |   split: test
2681 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2682 |   metrics:
2683 |   - type: accuracy
2684 |   value: 19.895763281775388

2691 |   name: MTEB MassiveScenarioClassification (mn)
2692 |   config: mn
2693 |   split: test
2694 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2695 |   metrics:
2696 |   - type: accuracy
2697 |   value: 32.431069266980494

2704 |   name: MTEB MassiveScenarioClassification (ms)
2705 |   config: ms
2706 |   split: test
2707 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2708 |   metrics:
2709 |   - type: accuracy
2710 |   value: 42.32347007397445

2717 |   name: MTEB MassiveScenarioClassification (my)
2718 |   config: my
2719 |   split: test
2720 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2721 |   metrics:
2722 |   - type: accuracy
2723 |   value: 20.864156018829856

2730 |   name: MTEB MassiveScenarioClassification (nb)
2731 |   config: nb
2732 |   split: test
2733 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2734 |   metrics:
2735 |   - type: accuracy
2736 |   value: 40.47074646940148

2743 |   name: MTEB MassiveScenarioClassification (nl)
2744 |   config: nl
2745 |   split: test
2746 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2747 |   metrics:
2748 |   - type: accuracy
2749 |   value: 43.591123066577

2756 |   name: MTEB MassiveScenarioClassification (pl)
2757 |   config: pl
2758 |   split: test
2759 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2760 |   metrics:
2761 |   - type: accuracy
2762 |   value: 41.876260928043045

2769 |   name: MTEB MassiveScenarioClassification (pt)
2770 |   config: pt
2771 |   split: test
2772 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2773 |   metrics:
2774 |   - type: accuracy
2775 |   value: 46.30800268997983

2782 |   name: MTEB MassiveScenarioClassification (ro)
2783 |   config: ro
2784 |   split: test
2785 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2786 |   metrics:
2787 |   - type: accuracy
2788 |   value: 42.525218560860786

2795 |   name: MTEB MassiveScenarioClassification (ru)
2796 |   config: ru
2797 |   split: test
2798 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2799 |   metrics:
2800 |   - type: accuracy
2801 |   value: 35.94821788836584

2808 |   name: MTEB MassiveScenarioClassification (sl)
2809 |   config: sl
2810 |   split: test
2811 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2812 |   metrics:
2813 |   - type: accuracy
2814 |   value: 38.69199731002017

2821 |   name: MTEB MassiveScenarioClassification (sq)
2822 |   config: sq
2823 |   split: test
2824 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2825 |   metrics:
2826 |   - type: accuracy
2827 |   value: 40.474108944182916

2834 |   name: MTEB MassiveScenarioClassification (sv)
2835 |   config: sv
2836 |   split: test
2837 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2838 |   metrics:
2839 |   - type: accuracy
2840 |   value: 41.523201075991935

2847 |   name: MTEB MassiveScenarioClassification (sw)
2848 |   config: sw
2849 |   split: test
2850 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2851 |   metrics:
2852 |   - type: accuracy
2853 |   value: 39.54942837928716

2860 |   name: MTEB MassiveScenarioClassification (ta)
2861 |   config: ta
2862 |   split: test
2863 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2864 |   metrics:
2865 |   - type: accuracy
2866 |   value: 22.8782784129119

2873 |   name: MTEB MassiveScenarioClassification (te)
2874 |   config: te
2875 |   split: test
2876 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2877 |   metrics:
2878 |   - type: accuracy
2879 |   value: 20.51445864156019

2886 |   name: MTEB MassiveScenarioClassification (th)
2887 |   config: th
2888 |   split: test
2889 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2890 |   metrics:
2891 |   - type: accuracy
2892 |   value: 34.92602555480834

2899 |   name: MTEB MassiveScenarioClassification (tl)
2900 |   config: tl
2901 |   split: test
2902 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2903 |   metrics:
2904 |   - type: accuracy
2905 |   value: 40.74983187626093

2912 |   name: MTEB MassiveScenarioClassification (tr)
2913 |   config: tr
2914 |   split: test
2915 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2916 |   metrics:
2917 |   - type: accuracy
2918 |   value: 39.06859448554136

2925 |   name: MTEB MassiveScenarioClassification (ur)
2926 |   config: ur
2927 |   split: test
2928 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2929 |   metrics:
2930 |   - type: accuracy
2931 |   value: 29.747814391392062

2938 |   name: MTEB MassiveScenarioClassification (vi)
2939 |   config: vi
2940 |   split: test
2941 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2942 |   metrics:
2943 |   - type: accuracy
2944 |   value: 38.02286482851379

2951 |   name: MTEB MassiveScenarioClassification (zh-CN)
2952 |   config: zh-CN
2953 |   split: test
2954 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2955 |   metrics:
2956 |   - type: accuracy
2957 |   value: 48.550773369199725

2964 |   name: MTEB MassiveScenarioClassification (zh-TW)
2965 |   config: zh-TW
2966 |   split: test
2967 | + revision: 7d571f92784cd94a019292a1f45445077d0ef634
2968 |   metrics:
2969 |   - type: accuracy
2970 |   value: 45.17821116341628
2977 |   name: MTEB MedrxivClusteringP2P
2978 |   config: default
2979 |   split: test
2980 | + revision: dcefc037ef84348e49b0d29109e891c01067226b
2981 |   metrics:
2982 |   - type: v_measure
2983 |   value: 28.301902023313875

2988 |   name: MTEB MedrxivClusteringS2S
2989 |   config: default
2990 |   split: test
2991 | + revision: 3cd0e71dfbe09d4de0f9e5ecba43e7ce280959dc
2992 |   metrics:
2993 |   - type: v_measure
2994 |   value: 24.932123582259287

2999 |   name: MTEB MindSmallReranking
3000 |   config: default
3001 |   split: test
3002 | + revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69
3003 |   metrics:
3004 |   - type: map
3005 |   value: 29.269341041468326

3012 |   name: MTEB NFCorpus
3013 |   config: default
3014 |   split: test
3015 | + revision: 7eb63cc0c1eb59324d709ebed25fcab851fa7610
3016 |   metrics:
3017 |   - type: map_at_1
3018 |   value: 1.2269999999999999

3069 |   name: MTEB NQ
3070 |   config: default
3071 |   split: test
3072 | + revision: 6062aefc120bfe8ece5897809fb2e53bfe0d128c
3073 |   metrics:
3074 |   - type: map_at_1
3075 |   value: 3.515

3126 |   name: MTEB QuoraRetrieval
3127 |   config: default
3128 |   split: test
3129 | + revision: 6205996560df11e3a3da9ab4f926788fc30a7db4
3130 |   metrics:
3131 |   - type: map_at_1
3132 |   value: 61.697

3183 |   name: MTEB RedditClustering
3184 |   config: default
3185 |   split: test
3186 | + revision: b2805658ae38990172679479369a78b86de8c390
3187 |   metrics:
3188 |   - type: v_measure
3189 |   value: 33.75741018380938

3194 |   name: MTEB RedditClusteringP2P
3195 |   config: default
3196 |   split: test
3197 | + revision: 385e3cb46b4cfa89021f56c4380204149d0efe33
3198 |   metrics:
3199 |   - type: v_measure
3200 |   value: 41.00799910099266

3205 |   name: MTEB SCIDOCS
3206 |   config: default
3207 |   split: test
3208 | + revision: 5c59ef3e437a0a9651c8fe6fde943e7dce59fba5
3209 |   metrics:
3210 |   - type: map_at_1
3211 |   value: 1.72
3262 |   name: MTEB SICK-R
3263 |   config: default
3264 |   split: test
3265 | + revision: 20a6d6f312dd54037fe07a32d58e5e168867909d
3266 |   metrics:
3267 |   - type: cos_sim_pearson
3268 |   value: 80.96286245858941

3283 |   name: MTEB STS12
3284 |   config: default
3285 |   split: test
3286 | + revision: fdf84275bb8ce4b49c971d02e84dd1abc677a50f
3287 |   metrics:
3288 |   - type: cos_sim_pearson
3289 |   value: 80.20938796088339

3304 |   name: MTEB STS13
3305 |   config: default
3306 |   split: test
3307 | + revision: 1591bfcbe8c69d4bf7fe2a16e2451017832cafb9
3308 |   metrics:
3309 |   - type: cos_sim_pearson
3310 |   value: 76.401935081936

3325 |   name: MTEB STS14
3326 |   config: default
3327 |   split: test
3328 | + revision: e2125984e7df8b7871f6ae9949cf6b6795e7c54b
3329 |   metrics:
3330 |   - type: cos_sim_pearson
3331 |   value: 75.35551963935667

3346 |   name: MTEB STS15
3347 |   config: default
3348 |   split: test
3349 | + revision: 1cd7298cac12a96a373b6a2f18738bb3e739a9b6
3350 |   metrics:
3351 |   - type: cos_sim_pearson
3352 |   value: 79.05293131911803

3367 |   name: MTEB STS16
3368 |   config: default
3369 |   split: test
3370 | + revision: 360a0b2dff98700d09e634a01e1cc1624d3e42cd
3371 |   metrics:
3372 |   - type: cos_sim_pearson
3373 |   value: 76.04750373932828
3388 |   name: MTEB STS17 (ko-ko)
3389 |   config: ko-ko
3390 |   split: test
3391 | + revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
3392 |   metrics:
3393 |   - type: cos_sim_pearson
3394 |   value: 43.0464619152799

3409 |   name: MTEB STS17 (ar-ar)
3410 |   config: ar-ar
3411 |   split: test
3412 | + revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
3413 |   metrics:
3414 |   - type: cos_sim_pearson
3415 |   value: 53.27469278912148

3430 |   name: MTEB STS17 (en-ar)
3431 |   config: en-ar
3432 |   split: test
3433 | + revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
3434 |   metrics:
3435 |   - type: cos_sim_pearson
3436 |   value: 1.5482997790039945

3451 |   name: MTEB STS17 (en-de)
3452 |   config: en-de
3453 |   split: test
3454 | + revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
3455 |   metrics:
3456 |   - type: cos_sim_pearson
3457 |   value: 27.5420218362265

3472 |   name: MTEB STS17 (en-en)
3473 |   config: en-en
3474 |   split: test
3475 | + revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
3476 |   metrics:
3477 |   - type: cos_sim_pearson
3478 |   value: 85.32029757646663

3493 |   name: MTEB STS17 (en-tr)
3494 |   config: en-tr
3495 |   split: test
3496 | + revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
3497 |   metrics:
3498 |   - type: cos_sim_pearson
3499 |   value: 4.37162299241808

3514 |   name: MTEB STS17 (es-en)
3515 |   config: es-en
3516 |   split: test
3517 | + revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
3518 |   metrics:
3519 |   - type: cos_sim_pearson
3520 |   value: 20.306030448858603

3535 |   name: MTEB STS17 (es-es)
3536 |   config: es-es
3537 |   split: test
3538 | + revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
3539 |   metrics:
3540 |   - type: cos_sim_pearson
3541 |   value: 66.81873207478459

3556 |   name: MTEB STS17 (fr-en)
3557 |   config: fr-en
3558 |   split: test
3559 | + revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
3560 |   metrics:
3561 |   - type: cos_sim_pearson
3562 |   value: 21.366487281202602

3577 |   name: MTEB STS17 (it-en)
3578 |   config: it-en
3579 |   split: test
3580 | + revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
3581 |   metrics:
3582 |   - type: cos_sim_pearson
3583 |   value: 20.73153177251085

3598 |   name: MTEB STS17 (nl-en)
3599 |   config: nl-en
3600 |   split: test
3601 | + revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
3602 |   metrics:
3603 |   - type: cos_sim_pearson
3604 |   value: 26.618435024084253
3619 |   name: MTEB STS22 (en)
3620 |   config: en
3621 |   split: test
3622 | + revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
3623 |   metrics:
3624 |   - type: cos_sim_pearson
3625 |   value: 59.17638344661753

3640 |   name: MTEB STS22 (de)
3641 |   config: de
3642 |   split: test
3643 | + revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
3644 |   metrics:
3645 |   - type: cos_sim_pearson
3646 |   value: 10.322254716987457

3661 |   name: MTEB STS22 (es)
3662 |   config: es
3663 |   split: test
3664 | + revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
3665 |   metrics:
3666 |   - type: cos_sim_pearson
3667 |   value: 43.38031880545056

3682 |   name: MTEB STS22 (pl)
3683 |   config: pl
3684 |   split: test
3685 | + revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
3686 |   metrics:
3687 |   - type: cos_sim_pearson
3688 |   value: 4.291290504363136

3703 |   name: MTEB STS22 (tr)
3704 |   config: tr
3705 |   split: test
3706 | + revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
3707 |   metrics:
3708 |   - type: cos_sim_pearson
3709 |   value: 4.102739498555817

3724 |   name: MTEB STS22 (ar)
3725 |   config: ar
3726 |   split: test
3727 | + revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
3728 |   metrics:
3729 |   - type: cos_sim_pearson
3730 |   value: 2.38765395226737

3745 |   name: MTEB STS22 (ru)
3746 |   config: ru
3747 |   split: test
3748 | + revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
3749 |   metrics:
3750 |   - type: cos_sim_pearson
3751 |   value: 7.6735490672676345

3766 |   name: MTEB STS22 (zh)
3767 |   config: zh
3768 |   split: test
3769 | + revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
3770 |   metrics:
3771 |   - type: cos_sim_pearson
3772 |   value: 0.06167614416104335

3787 |   name: MTEB STS22 (fr)
3788 |   config: fr
3789 |   split: test
3790 | + revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
3791 |   metrics:
3792 |   - type: cos_sim_pearson
3793 |   value: 53.19490347682836

3808 |   name: MTEB STS22 (de-en)
3809 |   config: de-en
3810 |   split: test
3811 | + revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
3812 |   metrics:
3813 |   - type: cos_sim_pearson
3814 |   value: 51.151158530122146

3829 |   name: MTEB STS22 (es-en)
3830 |   config: es-en
3831 |   split: test
3832 | + revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
3833 |   metrics:
3834 |   - type: cos_sim_pearson
3835 |   value: 30.36194885126792

3850 |   name: MTEB STS22 (it)
3851 |   config: it
3852 |   split: test
3853 | + revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
3854 |   metrics:
3855 |   - type: cos_sim_pearson
3856 |   value: 35.23883630335275

3871 |   name: MTEB STS22 (pl-en)
3872 |   config: pl-en
3873 |   split: test
3874 | + revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
3875 |   metrics:
3876 |   - type: cos_sim_pearson
3877 |   value: 19.809302548119547

3892 |   name: MTEB STS22 (zh-en)
3893 |   config: zh-en
3894 |   split: test
3895 | + revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
3896 |   metrics:
3897 |   - type: cos_sim_pearson
3898 |   value: 20.393500955410488

3913 |   name: MTEB STS22 (es-it)
3914 |   config: es-it
3915 |   split: test
3916 | + revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
3917 |   metrics:
3918 |   - type: cos_sim_pearson
3919 |   value: 36.58919983075148

3934 |   name: MTEB STS22 (de-fr)
3935 |   config: de-fr
3936 |   split: test
3937 | + revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
3938 |   metrics:
3939 |   - type: cos_sim_pearson
3940 |   value: 26.350936227950083

3955 |   name: MTEB STS22 (de-pl)
3956 |   config: de-pl
3957 |   split: test
3958 | + revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
3959 |   metrics:
3960 |   - type: cos_sim_pearson
3961 |   value: 20.056269198600322

3976 |   name: MTEB STS22 (fr-pl)
3977 |   config: fr-pl
3978 |   split: test
3979 | + revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
3980 |   metrics:
3981 |   - type: cos_sim_pearson
3982 |   value: 19.563740271419395
3997 |   name: MTEB STSBenchmark
3998 |   config: default
3999 |   split: test
4000 | + revision: 8913289635987208e6e7c72789e4be2fe94b6abd
4001 |   metrics:
4002 |   - type: cos_sim_pearson
4003 |   value: 80.00905671833966

4018 |   name: MTEB SciDocsRR
4019 |   config: default
4020 |   split: test
4021 | + revision: 56a6d0140cf6356659e2a7c1413286a774468d44
4022 |   metrics:
4023 |   - type: map
4024 |   value: 68.35710819755543

4031 |   name: MTEB SciFact
4032 |   config: default
4033 |   split: test
4034 | + revision: a75ae049398addde9b70f6b268875f5cbce99089
4035 |   metrics:
4036 |   - type: map_at_1
4037 |   value: 21.556

4088 |   name: MTEB SprintDuplicateQuestions
4089 |   config: default
4090 |   split: test
4091 | + revision: 5a8256d0dff9c4bd3be3ba3e67e4e70173f802ea
4092 |   metrics:
4093 |   - type: cos_sim_accuracy
4094 |   value: 99.49306930693069
4143 |   name: MTEB StackExchangeClustering
4144 |   config: default
4145 |   split: test
4146 | + revision: 70a89468f6dccacc6aa2b12a6eac54e74328f235
4147 |   metrics:
4148 |   - type: v_measure
4149 |   value: 44.59127540530939

4154 |   name: MTEB StackExchangeClusteringP2P
4155 |   config: default
4156 |   split: test
4157 | + revision: d88009ab563dd0b16cfaf4436abaf97fa3550cf0
4158 |   metrics:
4159 |   - type: v_measure
4160 |   value: 28.230204578753636

4165 |   name: MTEB StackOverflowDupQuestions
4166 |   config: default
4167 |   split: test
4168 | + revision: ef807ea29a75ec4f91b50fd4191cb4ee4589a9f9
4169 |   metrics:
4170 |   - type: map
4171 |   value: 39.96520488022785

4178 |   name: MTEB SummEval
4179 |   config: default
4180 |   split: test
4181 | + revision: 8753c2788d36c01fc6f05d03fe3f7268d63f9122
4182 |   metrics:
4183 |   - type: cos_sim_pearson
4184 |   value: 30.56303767714449

4195 |   name: MTEB TRECCOVID
4196 |   config: default
4197 |   split: test
4198 | + revision: 2c8041b2c07a79b6f7ba8fe6acc72e5d9f92d217
4199 |   metrics:
4200 |   - type: map_at_1
4201 |   value: 0.11299999999999999

4252 |   name: MTEB Touche2020
4253 |   config: default
4254 |   split: test
4255 | + revision: 527b7d77e16e343303e68cb6af11d6e18b9f7b3b
4256 |   metrics:
4257 |   - type: map_at_1
4258 |   value: 0.645
4309 |   name: MTEB ToxicConversationsClassification
4310 |   config: default
4311 |   split: test
4312 | + revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de
4313 |   metrics:
4314 |   - type: accuracy
4315 |   value: 62.7862

4324 |   name: MTEB TweetSentimentExtractionClassification
4325 |   config: default
4326 |   split: test
4327 | + revision: 62146448f05be9e52a36b8ee9936447ea787eede
4328 |   metrics:
4329 |   - type: accuracy
4330 |   value: 54.821731748726656

4337 |   name: MTEB TwentyNewsgroupsClustering
4338 |   config: default
4339 |   split: test
4340 | + revision: 091a54f9a36281ce7d6590ec8c75dd485e7e01d4
4341 |   metrics:
4342 |   - type: v_measure
4343 |   value: 28.24295128553035

4348 |   name: MTEB TwitterSemEval2015
4349 |   config: default
4350 |   split: test
4351 | + revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1
4352 |   metrics:
4353 |   - type: cos_sim_accuracy
4354 |   value: 81.5640460153782

4403 |   name: MTEB TwitterURLCorpus
4404 |   config: default
4405 |   split: test
4406 | + revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf
4407 |   metrics:
4408 |   - type: cos_sim_accuracy
4409 |   value: 86.63018589668955
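The change above only adds a `revision` field to each result entry in the model-index front matter, pinning the Hugging Face Hub commit of every evaluation dataset. A minimal sketch of how those pins can be read back out of the card, assuming the standard model-index layout and a local copy saved as `README.md` (the file name and field names below are illustrative, not part of this commit):

```python
# Sketch: list each MTEB result entry and the dataset revision pinned by this commit.
# Assumes the card is saved locally as "README.md" and follows the usual
# model-index layout, with name/config/split/revision nested under `dataset:`.
import yaml  # PyYAML

with open("README.md", encoding="utf-8") as f:
    text = f.read()

# The card metadata sits between the two leading "---" fences.
front_matter = text.split("---")[1]
card = yaml.safe_load(front_matter)

for result in card["model-index"][0]["results"]:
    dataset = result.get("dataset", {})
    # The 40-character hash is a Hub commit id; it can be passed as
    # `revision=...` to `datasets.load_dataset` to fetch that exact data version.
    print(dataset.get("name"), dataset.get("config"),
          dataset.get("split"), dataset.get("revision"))
```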