{"index":39,"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-sage\/Giga-Embeddings-instruct\">Giga-Embeddings-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average":73.7,"GeoreviewClassification":58.59,"HeadlineClassification":86.01,"InappropriatenessClassification":83.29,"KinopoiskClassification":74.89,"RuReviewsClassification":73.72,"RuSciBenchGRNTIClassification":74.52,"RuSciBenchOECDClassification":61.51,"MassiveIntentClassification (ru)":72.94,"MassiveScenarioClassification (ru)":77.82}
{"index":35,"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/FRIDA\">FRIDA<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average":73.67,"GeoreviewClassification":57.71,"HeadlineClassification":89.02,"InappropriatenessClassification":78.33,"KinopoiskClassification":70.47,"RuReviewsClassification":75.05,"RuSciBenchGRNTIClassification":69.9,"RuSciBenchOECDClassification":54.63,"MassiveIntentClassification (ru)":79.12,"MassiveScenarioClassification (ru)":88.81}
{"index":6,"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Alibaba-NLP\/gte-Qwen2-7B-instruct\">gte-Qwen2-7B-instruct<\/a>","Model Size (Million Parameters)":7613,"Memory Usage (GB, fp32)":28.36,"Embedding Dimensions":3584,"Max Tokens":32768,"Average":72.99,"GeoreviewClassification":60.01,"HeadlineClassification":76.38,"InappropriatenessClassification":76.41,"KinopoiskClassification":71.04,"RuReviewsClassification":74.85,"RuSciBenchGRNTIClassification":71.01,"RuSciBenchOECDClassification":57.68,"MassiveIntentClassification (ru)":82.1,"MassiveScenarioClassification (ru)":87.43}
{"index":24,"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/GritLM\/GritLM-7B\">GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"Embedding Dimensions":"","Max Tokens":4096,"Average":69.95,"GeoreviewClassification":57.7,"HeadlineClassification":86.08,"InappropriatenessClassification":67.85,"KinopoiskClassification":69.15,"RuReviewsClassification":72.05,"RuSciBenchGRNTIClassification":67.98,"RuSciBenchOECDClassification":54.45,"MassiveIntentClassification (ru)":76.01,"MassiveScenarioClassification (ru)":78.28}
{"index":34,"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Salesforce\/SFR-Embedding-Mistral\">SFR-Embedding-Mistral<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average":69.86,"GeoreviewClassification":57.36,"HeadlineClassification":87.11,"InappropriatenessClassification":70.64,"KinopoiskClassification":68.9,"RuReviewsClassification":71.01,"RuSciBenchGRNTIClassification":68.08,"RuSciBenchOECDClassification":54.1,"MassiveIntentClassification (ru)":73.86,"MassiveScenarioClassification (ru)":77.65}
{"index":5,"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Alibaba-NLP\/gte-Qwen1.5-7B-instruct\">gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"Embedding Dimensions":4096,"Max Tokens":32768,"Average":69.48,"GeoreviewClassification":59.31,"HeadlineClassification":82.32,"InappropriatenessClassification":73.26,"KinopoiskClassification":67.17,"RuReviewsClassification":72.89,"RuSciBenchGRNTIClassification":67.35,"RuSciBenchOECDClassification":54.38,"MassiveIntentClassification (ru)":73.15,"MassiveScenarioClassification (ru)":75.46}
{"index":61,"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average":69.11,"GeoreviewClassification":56.72,"HeadlineClassification":87.02,"InappropriatenessClassification":70.36,"KinopoiskClassification":68.35,"RuReviewsClassification":70.57,"RuSciBenchGRNTIClassification":66.05,"RuSciBenchOECDClassification":52.11,"MassiveIntentClassification (ru)":73.74,"MassiveScenarioClassification (ru)":77.1}
{"index":65,"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large-instruct\">multilingual-e5-large-instruct<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average":66.31,"GeoreviewClassification":55.9,"HeadlineClassification":86.18,"InappropriatenessClassification":65.53,"KinopoiskClassification":66.12,"RuReviewsClassification":68.56,"RuSciBenchGRNTIClassification":65.07,"RuSciBenchOECDClassification":50.21,"MassiveIntentClassification (ru)":67.6,"MassiveScenarioClassification (ru)":71.59}
{"index":25,"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/HIT-TMG\/KaLM-embedding-multilingual-mini-instruct-v1\">KaLM-embedding-multilingual-mini-instruct-v1<\/a>","Model Size (Million Parameters)":494,"Memory Usage (GB, fp32)":1.84,"Embedding Dimensions":896,"Max Tokens":131072,"Average":65.21,"GeoreviewClassification":52.04,"HeadlineClassification":83.4,"InappropriatenessClassification":64.14,"KinopoiskClassification":63.15,"RuReviewsClassification":68.24,"RuSciBenchGRNTIClassification":61.24,"RuSciBenchOECDClassification":48.1,"MassiveIntentClassification (ru)":68.61,"MassiveScenarioClassification (ru)":77.98}
{"index":26,"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/HIT-TMG\/KaLM-embedding-multilingual-mini-instruct-v1.5\">KaLM-embedding-multilingual-mini-instruct-v1.5<\/a>","Model Size (Million Parameters)":494,"Memory Usage (GB, fp32)":1.84,"Embedding Dimensions":896,"Max Tokens":131072,"Average":65.02,"GeoreviewClassification":51.5,"HeadlineClassification":84.37,"InappropriatenessClassification":65.89,"KinopoiskClassification":62.29,"RuReviewsClassification":67.7,"RuSciBenchGRNTIClassification":62.76,"RuSciBenchOECDClassification":49.77,"MassiveIntentClassification (ru)":66.07,"MassiveScenarioClassification (ru)":74.81}
{"index":20,"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Daxtra\/jina-embeddings-v3\">jina-embeddings-v3<\/a>","Model Size (Million Parameters)":572,"Memory Usage (GB, fp32)":2.13,"Embedding Dimensions":1024,"Max Tokens":8194,"Average":64.41,"GeoreviewClassification":48.01,"HeadlineClassification":75.08,"InappropriatenessClassification":61.05,"KinopoiskClassification":62.39,"RuReviewsClassification":67.58,"RuSciBenchGRNTIClassification":59.19,"RuSciBenchOECDClassification":45.56,"MassiveIntentClassification (ru)":76.8,"MassiveScenarioClassification (ru)":84.06}
{"index":70,"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/jinaai\/jina-embeddings-v3\">jina-embeddings-v3<\/a>","Model Size (Million Parameters)":572,"Memory Usage (GB, fp32)":2.13,"Embedding Dimensions":1024,"Max Tokens":8194,"Average":64.41,"GeoreviewClassification":48.01,"HeadlineClassification":75.08,"InappropriatenessClassification":61.05,"KinopoiskClassification":62.39,"RuReviewsClassification":67.58,"RuSciBenchGRNTIClassification":59.19,"RuSciBenchOECDClassification":45.56,"MassiveIntentClassification (ru)":76.8,"MassiveScenarioClassification (ru)":84.06}
{"index":19,"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Cohere\/Cohere-embed-multilingual-v3.0\">Cohere-embed-multilingual-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":512,"Average":63.92,"GeoreviewClassification":50.88,"HeadlineClassification":79.5,"InappropriatenessClassification":62.46,"KinopoiskClassification":61.84,"RuReviewsClassification":66.83,"RuSciBenchGRNTIClassification":62.17,"RuSciBenchOECDClassification":48.22,"MassiveIntentClassification (ru)":69.08,"MassiveScenarioClassification (ru)":74.26}
{"index":2,"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/docs.voyageai.com\/embeddings\/\">voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":16000,"Average":63.8,"GeoreviewClassification":47.18,"HeadlineClassification":75.1,"InappropriatenessClassification":62.99,"KinopoiskClassification":64.43,"RuReviewsClassification":65.99,"RuSciBenchGRNTIClassification":61.46,"RuSciBenchOECDClassification":47.86,"MassiveIntentClassification (ru)":70.87,"MassiveScenarioClassification (ru)":78.34}
{"index":95,"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/openai.com\/blog\/new-embedding-models-and-api-updates\">text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":3072,"Max Tokens":8191,"Average":62.89,"GeoreviewClassification":47.06,"HeadlineClassification":77.19,"InappropriatenessClassification":61.5,"KinopoiskClassification":60.21,"RuReviewsClassification":66.11,"RuSciBenchGRNTIClassification":61.04,"RuSciBenchOECDClassification":47.07,"MassiveIntentClassification (ru)":70.11,"MassiveScenarioClassification (ru)":75.69}
{"index":36,"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/ru-en-RoSBERTa\">ru-en-RoSBERTa<\/a>","Model Size (Million Parameters)":404,"Memory Usage (GB, fp32)":1.5,"Embedding Dimensions":1024,"Max Tokens":514,"Average":62.74,"GeoreviewClassification":49.7,"HeadlineClassification":78.0,"InappropriatenessClassification":61.32,"KinopoiskClassification":63.27,"RuReviewsClassification":67.96,"RuSciBenchGRNTIClassification":59.33,"RuSciBenchOECDClassification":46.33,"MassiveIntentClassification (ru)":66.97,"MassiveScenarioClassification (ru)":71.8}
{"index":47,"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-bge-m3\">USER-bge-m3<\/a>","Model Size (Million Parameters)":359,"Memory Usage (GB, fp32)":1.34,"Embedding Dimensions":1024,"Max Tokens":8192,"Average":61.92,"GeoreviewClassification":50.98,"HeadlineClassification":70.09,"InappropriatenessClassification":60.76,"KinopoiskClassification":63.33,"RuReviewsClassification":68.52,"RuSciBenchGRNTIClassification":57.67,"RuSciBenchOECDClassification":44.2,"MassiveIntentClassification (ru)":68.85,"MassiveScenarioClassification (ru)":72.9}
{"index":3,"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/docs.voyageai.com\/embeddings\/\">voyage-multilingual-2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":32000,"Average":61.62,"GeoreviewClassification":45.45,"HeadlineClassification":79.3,"InappropriatenessClassification":60.33,"KinopoiskClassification":57.99,"RuReviewsClassification":63.12,"RuSciBenchGRNTIClassification":60.63,"RuSciBenchOECDClassification":46.05,"MassiveIntentClassification (ru)":67.89,"MassiveScenarioClassification (ru)":73.86}
{"index":27,"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/HIT-TMG\/KaLM-embedding-multilingual-mini-v1\">KaLM-embedding-multilingual-mini-v1<\/a>","Model Size (Million Parameters)":494,"Memory Usage (GB, fp32)":1.84,"Embedding Dimensions":896,"Max Tokens":131072,"Average":61.58,"GeoreviewClassification":47.69,"HeadlineClassification":83.46,"InappropriatenessClassification":61.32,"KinopoiskClassification":59.04,"RuReviewsClassification":66.09,"RuSciBenchGRNTIClassification":61.41,"RuSciBenchOECDClassification":48.67,"MassiveIntentClassification (ru)":60.08,"MassiveScenarioClassification (ru)":66.44}
{"index":64,"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"Embedding Dimensions":1024,"Max Tokens":514,"Average":61.01,"GeoreviewClassification":49.69,"HeadlineClassification":77.19,"InappropriatenessClassification":61.59,"KinopoiskClassification":56.59,"RuReviewsClassification":65.28,"RuSciBenchGRNTIClassification":58.2,"RuSciBenchOECDClassification":43.91,"MassiveIntentClassification (ru)":65.76,"MassiveScenarioClassification (ru)":70.85}
{"index":11,"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-m3\">bge-m3<\/a>","Model Size (Million Parameters)":567,"Memory Usage (GB, fp32)":2.11,"Embedding Dimensions":1024,"Max Tokens":8192,"Average":60.46,"GeoreviewClassification":48.27,"HeadlineClassification":70.32,"InappropriatenessClassification":59.87,"KinopoiskClassification":58.23,"RuReviewsClassification":66.91,"RuSciBenchGRNTIClassification":55.81,"RuSciBenchOECDClassification":42.57,"MassiveIntentClassification (ru)":68.76,"MassiveScenarioClassification (ru)":73.42}
{"index":46,"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/USER-base\">USER-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Embedding Dimensions":768,"Max Tokens":512,"Average":59.89,"GeoreviewClassification":46.88,"HeadlineClassification":75.0,"InappropriatenessClassification":61.83,"KinopoiskClassification":56.03,"RuReviewsClassification":65.48,"RuSciBenchGRNTIClassification":55.55,"RuSciBenchOECDClassification":43.28,"MassiveIntentClassification (ru)":65.92,"MassiveScenarioClassification (ru)":69.06}
{"index":0,"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/docs.voyageai.com\/embeddings\/\">voyage-3-lite<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":512,"Max Tokens":32000,"Average":59.26,"GeoreviewClassification":45.4,"HeadlineClassification":78.34,"InappropriatenessClassification":59.04,"KinopoiskClassification":57.39,"RuReviewsClassification":59.14,"RuSciBenchGRNTIClassification":58.03,"RuSciBenchOECDClassification":44.54,"MassiveIntentClassification (ru)":63.02,"MassiveScenarioClassification (ru)":68.44}
{"index":91,"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/LaBSE-ru-turbo\">LaBSE-ru-turbo<\/a>","Model Size (Million Parameters)":128,"Memory Usage (GB, fp32)":0.48,"Embedding Dimensions":768,"Max Tokens":512,"Average":59.23,"GeoreviewClassification":46.04,"HeadlineClassification":69.98,"InappropriatenessClassification":61.39,"KinopoiskClassification":53.59,"RuReviewsClassification":64.58,"RuSciBenchGRNTIClassification":56.67,"RuSciBenchOECDClassification":43.58,"MassiveIntentClassification (ru)":66.08,"MassiveScenarioClassification (ru)":71.13}
{"index":1,"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/docs.voyageai.com\/embeddings\/\">voyage-3<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":32000,"Average":58.75,"GeoreviewClassification":43.02,"HeadlineClassification":76.74,"InappropriatenessClassification":59.62,"KinopoiskClassification":62.14,"RuReviewsClassification":58.37,"RuSciBenchGRNTIClassification":59.01,"RuSciBenchOECDClassification":45.37,"MassiveIntentClassification (ru)":58.32,"MassiveScenarioClassification (ru)":66.18}
{"index":18,"Rank":26,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Cohere\/Cohere-embed-multilingual-light-v3.0\">Cohere-embed-multilingual-light-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":384,"Max Tokens":512,"Average":58.42,"GeoreviewClassification":44.47,"HeadlineClassification":76.3,"InappropriatenessClassification":59.73,"KinopoiskClassification":50.88,"RuReviewsClassification":61.96,"RuSciBenchGRNTIClassification":58.37,"RuSciBenchOECDClassification":45.1,"MassiveIntentClassification (ru)":61.88,"MassiveScenarioClassification (ru)":67.1}
{"index":96,"Rank":27,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/openai.com\/blog\/new-embedding-models-and-api-updates\">text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":8191,"Average":58.26,"GeoreviewClassification":42.96,"HeadlineClassification":74.06,"InappropriatenessClassification":58.86,"KinopoiskClassification":55.03,"RuReviewsClassification":61.07,"RuSciBenchGRNTIClassification":55.56,"RuSciBenchOECDClassification":43.35,"MassiveIntentClassification (ru)":63.87,"MassiveScenarioClassification (ru)":69.58}
{"index":63,"Rank":28,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average":58.26,"GeoreviewClassification":46.05,"HeadlineClassification":75.64,"InappropriatenessClassification":58.78,"KinopoiskClassification":50.89,"RuReviewsClassification":62.99,"RuSciBenchGRNTIClassification":56.28,"RuSciBenchOECDClassification":42.69,"MassiveIntentClassification (ru)":62.78,"MassiveScenarioClassification (ru)":68.21}
{"index":37,"Rank":29,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_mt_nlu_ru\">sbert_large_mt_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Embedding Dimensions":1024,"Max Tokens":514,"Average":57.52,"GeoreviewClassification":39.67,"HeadlineClassification":77.19,"InappropriatenessClassification":64.64,"KinopoiskClassification":50.33,"RuReviewsClassification":58.29,"RuSciBenchGRNTIClassification":54.19,"RuSciBenchOECDClassification":43.8,"MassiveIntentClassification (ru)":61.42,"MassiveScenarioClassification (ru)":68.13}
{"index":38,"Rank":30,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ai-forever\/sbert_large_nlu_ru\">sbert_large_nlu_ru<\/a>","Model Size (Million Parameters)":427,"Memory Usage (GB, fp32)":1.59,"Embedding Dimensions":1024,"Max Tokens":512,"Average":57.24,"GeoreviewClassification":39.97,"HeadlineClassification":79.26,"InappropriatenessClassification":62.52,"KinopoiskClassification":49.51,"RuReviewsClassification":58.27,"RuSciBenchGRNTIClassification":53.9,"RuSciBenchOECDClassification":43.04,"MassiveIntentClassification (ru)":61.09,"MassiveScenarioClassification (ru)":67.6}
{"index":87,"Rank":31,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average":56.88,"GeoreviewClassification":42.33,"HeadlineClassification":70.35,"InappropriatenessClassification":59.32,"KinopoiskClassification":44.31,"RuReviewsClassification":62.33,"RuSciBenchGRNTIClassification":56.01,"RuSciBenchOECDClassification":44.14,"MassiveIntentClassification (ru)":63.23,"MassiveScenarioClassification (ru)":69.92}
{"index":71,"Rank":32,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/jncraton\/multilingual-e5-small-ct2-int8\">multilingual-e5-small-ct2-int8<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average":56.45,"GeoreviewClassification":44.66,"HeadlineClassification":73.94,"InappropriatenessClassification":59.16,"KinopoiskClassification":49.96,"RuReviewsClassification":61.18,"RuSciBenchGRNTIClassification":54.99,"RuSciBenchOECDClassification":41.72,"MassiveIntentClassification (ru)":58.65,"MassiveScenarioClassification (ru)":63.77}
{"index":66,"Rank":33,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average":56.44,"GeoreviewClassification":44.66,"HeadlineClassification":73.94,"InappropriatenessClassification":59.16,"KinopoiskClassification":49.96,"RuReviewsClassification":61.18,"RuSciBenchGRNTIClassification":54.99,"RuSciBenchOECDClassification":41.72,"MassiveIntentClassification (ru)":58.43,"MassiveScenarioClassification (ru)":63.89}
{"index":48,"Rank":34,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/deepvk\/deberta-v1-base\">deberta-v1-base<\/a>","Model Size (Million Parameters)":124,"Memory Usage (GB, fp32)":0.46,"Embedding Dimensions":768,"Max Tokens":512,"Average":56.18,"GeoreviewClassification":40.19,"HeadlineClassification":78.75,"InappropriatenessClassification":61.33,"KinopoiskClassification":48.78,"RuReviewsClassification":55.66,"RuSciBenchGRNTIClassification":53.53,"RuSciBenchOECDClassification":41.34,"MassiveIntentClassification (ru)":61.32,"MassiveScenarioClassification (ru)":64.71}
{"index":21,"Rank":35,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/distilrubert-small-cased-conversational\">distilrubert-small-cased-conversational<\/a>","Model Size (Million Parameters)":106,"Memory Usage (GB, fp32)":0.39,"Embedding Dimensions":768,"Max Tokens":512,"Average":55.15,"GeoreviewClassification":38.95,"HeadlineClassification":75.59,"InappropriatenessClassification":60.68,"KinopoiskClassification":49.67,"RuReviewsClassification":54.05,"RuSciBenchGRNTIClassification":48.53,"RuSciBenchOECDClassification":37.65,"MassiveIntentClassification (ru)":63.12,"MassiveScenarioClassification (ru)":68.08}
{"index":92,"Rank":36,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sergeyzh\/rubert-tiny-turbo\">rubert-tiny-turbo<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Embedding Dimensions":312,"Max Tokens":2048,"Average":55.01,"GeoreviewClassification":41.36,"HeadlineClassification":68.9,"InappropriatenessClassification":59.11,"KinopoiskClassification":50.47,"RuReviewsClassification":60.66,"RuSciBenchGRNTIClassification":52.93,"RuSciBenchOECDClassification":40.79,"MassiveIntentClassification (ru)":57.98,"MassiveScenarioClassification (ru)":62.9}
{"index":43,"Rank":37,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/LaBSE-en-ru\">LaBSE-en-ru<\/a>","Model Size (Million Parameters)":129,"Memory Usage (GB, fp32)":0.48,"Embedding Dimensions":768,"Max Tokens":512,"Average":54.98,"GeoreviewClassification":40.89,"HeadlineClassification":68.75,"InappropriatenessClassification":58.48,"KinopoiskClassification":49.85,"RuReviewsClassification":58.01,"RuSciBenchGRNTIClassification":52.8,"RuSciBenchOECDClassification":40.36,"MassiveIntentClassification (ru)":60.53,"MassiveScenarioClassification (ru)":65.15}
{"index":77,"Rank":38,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"Embedding Dimensions":768,"Max Tokens":512,"Average":54.71,"GeoreviewClassification":40.86,"HeadlineClassification":68.75,"InappropriatenessClassification":58.52,"KinopoiskClassification":46.77,"RuReviewsClassification":58.01,"RuSciBenchGRNTIClassification":53.04,"RuSciBenchOECDClassification":40.48,"MassiveIntentClassification (ru)":60.67,"MassiveScenarioClassification (ru)":65.25}
{"index":86,"Rank":39,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":512,"Average":53.77,"GeoreviewClassification":38.24,"HeadlineClassification":68.3,"InappropriatenessClassification":58.18,"KinopoiskClassification":41.45,"RuReviewsClassification":58.88,"RuSciBenchGRNTIClassification":53.19,"RuSciBenchOECDClassification":41.41,"MassiveIntentClassification (ru)":59.06,"MassiveScenarioClassification (ru)":65.25}
{"index":45,"Rank":40,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny2\">rubert-tiny2<\/a>","Model Size (Million Parameters)":29,"Memory Usage (GB, fp32)":0.11,"Embedding Dimensions":2048,"Max Tokens":514,"Average":52.17,"GeoreviewClassification":39.64,"HeadlineClassification":74.19,"InappropriatenessClassification":58.57,"KinopoiskClassification":49.06,"RuReviewsClassification":56.99,"RuSciBenchGRNTIClassification":45.63,"RuSciBenchOECDClassification":35.48,"MassiveIntentClassification (ru)":50.83,"MassiveScenarioClassification (ru)":59.15}
{"index":22,"Rank":41,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased\">rubert-base-cased<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average":51.6,"GeoreviewClassification":37.22,"HeadlineClassification":75.23,"InappropriatenessClassification":57.34,"KinopoiskClassification":49.91,"RuReviewsClassification":50.74,"RuSciBenchGRNTIClassification":48.03,"RuSciBenchOECDClassification":36.13,"MassiveIntentClassification (ru)":53.02,"MassiveScenarioClassification (ru)":56.79}
{"index":93,"Rank":42,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/shibing624\/text2vec-base-multilingual\">text2vec-base-multilingual<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"Embedding Dimensions":384,"Max Tokens":256,"Average":51.35,"GeoreviewClassification":34.63,"HeadlineClassification":62.29,"InappropriatenessClassification":57.37,"KinopoiskClassification":42.09,"RuReviewsClassification":56.71,"RuSciBenchGRNTIClassification":49.18,"RuSciBenchOECDClassification":39.55,"MassiveIntentClassification (ru)":58.02,"MassiveScenarioClassification (ru)":62.31}
{"index":23,"Rank":43,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/DeepPavlov\/rubert-base-cased-sentence\">rubert-base-cased-sentence<\/a>","Model Size (Million Parameters)":180,"Memory Usage (GB, fp32)":0.67,"Embedding Dimensions":768,"Max Tokens":512,"Average":51.27,"GeoreviewClassification":38.05,"HeadlineClassification":67.64,"InappropriatenessClassification":58.27,"KinopoiskClassification":45.86,"RuReviewsClassification":58.34,"RuSciBenchGRNTIClassification":52.18,"RuSciBenchOECDClassification":40.11,"MassiveIntentClassification (ru)":49.1,"MassiveScenarioClassification (ru)":51.91}
{"index":44,"Rank":44,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/cointegrated\/rubert-tiny\">rubert-tiny<\/a>","Model Size (Million Parameters)":12,"Memory Usage (GB, fp32)":0.04,"Embedding Dimensions":512,"Max Tokens":514,"Average":44.55,"GeoreviewClassification":33.45,"HeadlineClassification":57.65,"InappropriatenessClassification":54.5,"KinopoiskClassification":41.36,"RuReviewsClassification":49.56,"RuSciBenchGRNTIClassification":35.71,"RuSciBenchOECDClassification":26.51,"MassiveIntentClassification (ru)":50.1,"MassiveScenarioClassification (ru)":52.15}
{"index":67,"Rank":45,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/izhx\/udever-bloom-1b1\">udever-bloom-1b1<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1536,"Max Tokens":2048,"Average":40.89,"GeoreviewClassification":30.5,"HeadlineClassification":36.09,"InappropriatenessClassification":54.64,"KinopoiskClassification":41.23,"RuReviewsClassification":45.26,"RuSciBenchGRNTIClassification":30.37,"RuSciBenchOECDClassification":22.65,"MassiveIntentClassification (ru)":52.99,"MassiveScenarioClassification (ru)":54.26}
{"index":17,"Rank":46,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Cohere\/Cohere-embed-english-v3.0\">Cohere-embed-english-v3.0<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":512,"Average":38.84,"GeoreviewClassification":32.95,"HeadlineClassification":45.37,"InappropriatenessClassification":54.71,"KinopoiskClassification":36.31,"RuReviewsClassification":48.71,"RuSciBenchGRNTIClassification":24.06,"RuSciBenchOECDClassification":19.18,"MassiveIntentClassification (ru)":41.81,"MassiveScenarioClassification (ru)":46.46}
{"index":60,"Rank":47,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-large-v2\">e5-large-v2<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Embedding Dimensions":1024,"Max Tokens":512,"Average":38.22,"GeoreviewClassification":32.09,"HeadlineClassification":44.38,"InappropriatenessClassification":53.71,"KinopoiskClassification":37.27,"RuReviewsClassification":47.99,"RuSciBenchGRNTIClassification":24.33,"RuSciBenchOECDClassification":18.45,"MassiveIntentClassification (ru)":40.62,"MassiveScenarioClassification (ru)":45.16}
{"index":58,"Rank":48,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-base-v2\">e5-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average":36.0,"GeoreviewClassification":30.9,"HeadlineClassification":41.74,"InappropriatenessClassification":53.27,"KinopoiskClassification":34.67,"RuReviewsClassification":47.13,"RuSciBenchGRNTIClassification":20.87,"RuSciBenchOECDClassification":16.49,"MassiveIntentClassification (ru)":36.82,"MassiveScenarioClassification (ru)":42.08}
{"index":68,"Rank":49,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/izhx\/udever-bloom-560m\">udever-bloom-560m<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":1024,"Max Tokens":2048,"Average":35.09,"GeoreviewClassification":28.08,"HeadlineClassification":28.25,"InappropriatenessClassification":52.74,"KinopoiskClassification":39.85,"RuReviewsClassification":41.19,"RuSciBenchGRNTIClassification":16.08,"RuSciBenchOECDClassification":12.1,"MassiveIntentClassification (ru)":47.61,"MassiveScenarioClassification (ru)":49.94}
{"index":50,"Rank":50,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/dwzhu\/e5-base-4k\">e5-base-4k<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average":34.6,"GeoreviewClassification":29.03,"HeadlineClassification":34.62,"InappropriatenessClassification":52.6,"KinopoiskClassification":38.3,"RuReviewsClassification":44.29,"RuSciBenchGRNTIClassification":17.9,"RuSciBenchOECDClassification":14.1,"MassiveIntentClassification (ru)":40.19,"MassiveScenarioClassification (ru)":40.39}
{"index":10,"Rank":51,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-large-en-v1.5\">bge-large-en-v1.5<\/a>","Model Size (Million Parameters)":1340,"Memory Usage (GB, fp32)":4.99,"Embedding Dimensions":1024,"Max Tokens":512,"Average":33.71,"GeoreviewClassification":28.64,"HeadlineClassification":33.56,"InappropriatenessClassification":51.81,"KinopoiskClassification":35.68,"RuReviewsClassification":44.62,"RuSciBenchGRNTIClassification":22.05,"RuSciBenchOECDClassification":16.53,"MassiveIntentClassification (ru)":32.31,"MassiveScenarioClassification (ru)":38.22}
{"index":73,"Rank":52,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/nomic-ai\/nomic-embed-text-v1\">nomic-embed-text-v1<\/a>","Model Size (Million Parameters)":547,"Memory Usage (GB, fp32)":2.04,"Embedding Dimensions":768,"Max Tokens":8192,"Average":32.24,"GeoreviewClassification":28.69,"HeadlineClassification":31.06,"InappropriatenessClassification":51.92,"KinopoiskClassification":37.39,"RuReviewsClassification":44.25,"RuSciBenchGRNTIClassification":19.29,"RuSciBenchOECDClassification":14.8,"MassiveIntentClassification (ru)":29.22,"MassiveScenarioClassification (ru)":33.56}
{"index":9,"Rank":53,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-base-en-v1.5\">bge-base-en-v1.5<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average":31.96,"GeoreviewClassification":27.74,"HeadlineClassification":30.07,"InappropriatenessClassification":51.63,"KinopoiskClassification":35.58,"RuReviewsClassification":43.47,"RuSciBenchGRNTIClassification":17.34,"RuSciBenchOECDClassification":13.16,"MassiveIntentClassification (ru)":31.7,"MassiveScenarioClassification (ru)":36.99}
{"index":12,"Rank":54,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BAAI\/bge-small-en-v1.5\">bge-small-en-v1.5<\/a>","Model Size (Million Parameters)":24,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":512,"Max Tokens":512,"Average":29.63,"GeoreviewClassification":27.23,"HeadlineClassification":29.75,"InappropriatenessClassification":51.22,"KinopoiskClassification":35.31,"RuReviewsClassification":43.95,"RuSciBenchGRNTIClassification":13.96,"RuSciBenchOECDClassification":10.76,"MassiveIntentClassification (ru)":24.69,"MassiveScenarioClassification (ru)":29.84}
{"index":59,"Rank":55,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-large\">e5-large<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Embedding Dimensions":1024,"Max Tokens":512,"Average":29.54,"GeoreviewClassification":28.65,"HeadlineClassification":29.77,"InappropriatenessClassification":52.5,"KinopoiskClassification":35.86,"RuReviewsClassification":43.2,"RuSciBenchGRNTIClassification":14.83,"RuSciBenchOECDClassification":11.64,"MassiveIntentClassification (ru)":22.51,"MassiveScenarioClassification (ru)":26.94}
{"index":80,"Rank":56,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":514,"Average":28.82,"GeoreviewClassification":25.93,"HeadlineClassification":28.53,"InappropriatenessClassification":51.82,"KinopoiskClassification":34.18,"RuReviewsClassification":42.33,"RuSciBenchGRNTIClassification":13.29,"RuSciBenchOECDClassification":10.62,"MassiveIntentClassification (ru)":23.98,"MassiveScenarioClassification (ru)":28.71}
{"index":79,"Rank":57,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"Embedding Dimensions":384,"Max Tokens":512,"Average":28.75,"GeoreviewClassification":27.08,"HeadlineClassification":27.77,"InappropriatenessClassification":51.73,"KinopoiskClassification":33.93,"RuReviewsClassification":41.79,"RuSciBenchGRNTIClassification":10.08,"RuSciBenchOECDClassification":8.3,"MassiveIntentClassification (ru)":27.58,"MassiveScenarioClassification (ru)":30.46}
{"index":57,"Rank":58,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-base\">e5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average":28.17,"GeoreviewClassification":27.81,"HeadlineClassification":29.8,"InappropriatenessClassification":51.95,"KinopoiskClassification":34.33,"RuReviewsClassification":41.31,"RuSciBenchGRNTIClassification":10.71,"RuSciBenchOECDClassification":9.01,"MassiveIntentClassification (ru)":21.3,"MassiveScenarioClassification (ru)":27.27}
{"index":78,"Rank":59,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Embedding Dimensions":384,"Max Tokens":512,"Average":28.15,"GeoreviewClassification":23.49,"HeadlineClassification":28.49,"InappropriatenessClassification":50.85,"KinopoiskClassification":34.17,"RuReviewsClassification":42.49,"RuSciBenchGRNTIClassification":10.49,"RuSciBenchOECDClassification":8.31,"MassiveIntentClassification (ru)":26.33,"MassiveScenarioClassification (ru)":28.75}
{"index":62,"Rank":60,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/e5-small\">e5-small<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"Embedding Dimensions":384,"Max Tokens":512,"Average":25.28,"GeoreviewClassification":27.15,"HeadlineClassification":28.01,"InappropriatenessClassification":51.46,"KinopoiskClassification":33.59,"RuReviewsClassification":39.68,"RuSciBenchGRNTIClassification":8.39,"RuSciBenchOECDClassification":7.24,"MassiveIntentClassification (ru)":13.65,"MassiveScenarioClassification (ru)":18.39}
{"index":4,"Rank":61,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/github.com\/facebookresearch\/LASER\">LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"Embedding Dimensions":1024,"Max Tokens":"N\/A","Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":44.29,"MassiveScenarioClassification (ru)":51.84}
{"index":7,"Rank":62,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Alibaba-NLP\/gte-multilingual-base\">gte-multilingual-base<\/a>","Model Size (Million Parameters)":305,"Memory Usage (GB, fp32)":1.14,"Embedding Dimensions":768,"Max Tokens":8192,"Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":67.46,"MassiveScenarioClassification (ru)":71.65}
{"index":8,"Rank":63,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Alignment-Lab-AI\/e5-mistral-7b-instruct\">e5-mistral-7b-instruct<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"Embedding Dimensions":4096,"Max Tokens":32768,"Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":76.63,"MassiveScenarioClassification (ru)":80.52}
{"index":13,"Rank":64,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/BookingCare\/multilingual-e5-base-similarity-v1-onnx-quantized\">multilingual-e5-base-similarity-v1-onnx-quantized<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":69.02,"MassiveScenarioClassification (ru)":72.99}
{"index":14,"Rank":65,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ClayAtlas\/winberta-base\">winberta-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":39.69,"MassiveScenarioClassification (ru)":48.66}
{"index":15,"Rank":66,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ClayAtlas\/winberta-large\">winberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":11.27,"MassiveScenarioClassification (ru)":16.71}
{"index":16,"Rank":67,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ClayAtlas\/windberta-large\">windberta-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":10.4,"MassiveScenarioClassification (ru)":18.96}
{"index":28,"Rank":68,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Labib11\/MUG-B-1.6\">MUG-B-1.6<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Embedding Dimensions":1024,"Max Tokens":512,"Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":31.82,"MassiveScenarioClassification (ru)":37.73}
{"index":30,"Rank":70,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Maxthemacaque\/onnx-gte-multilingual-base\">onnx-gte-multilingual-base<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":67.46,"MassiveScenarioClassification (ru)":71.65}
{"index":31,"Rank":71,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Muennighoff\/SGPT-125M-weightedmean-msmarco-specb-bitfit\">SGPT-125M-weightedmean-msmarco-specb-bitfit<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":2048,"Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":35.97,"MassiveScenarioClassification (ru)":32.76}
{"index":32,"Rank":72,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Muennighoff\/SGPT-125M-weightedmean-nli-bitfit\">SGPT-125M-weightedmean-nli-bitfit<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"Embedding Dimensions":768,"Max Tokens":2048,"Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":37.46,"MassiveScenarioClassification (ru)":35.95}
{"index":33,"Rank":73,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Naresh\/multilingual-e5-base-Q4_K_M-GGUF\">multilingual-e5-base-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":69.02,"MassiveScenarioClassification (ru)":72.99}
{"index":40,"Rank":74,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/amazon\/Titan-text-embeddings-v2\">Titan-text-embeddings-v2<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":60.85,"MassiveScenarioClassification (ru)":65.42}
{"index":41,"Rank":75,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/bigscience\/sgpt-bloom-7b1-msmarco\">sgpt-bloom-7b1-msmarco<\/a>","Model Size (Million Parameters)":7068,"Memory Usage (GB, fp32)":26.33,"Embedding Dimensions":4096,"Max Tokens":2048,"Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":58.32,"MassiveScenarioClassification (ru)":61.6}
{"index":42,"Rank":76,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/brahmairesearch\/slx-v0.1\">slx-v0.1<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.08,"Embedding Dimensions":384,"Max Tokens":512,"Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":27.14,"MassiveScenarioClassification (ru)":29.95}
{"index":49,"Rank":77,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/dinab\/multilingual-e5-base-Q4_K_M-GGUF\">multilingual-e5-base-Q4_K_M-GGUF<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":69.02,"MassiveScenarioClassification (ru)":72.99}
{"index":51,"Rank":78,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/facebook\/SONAR\">SONAR<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":62.63,"MassiveScenarioClassification (ru)":67.96}
{"index":54,"Rank":81,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ibm-granite\/granite-embedding-107m-multilingual\">granite-embedding-107m-multilingual<\/a>","Model Size (Million Parameters)":107,"Memory Usage (GB, fp32)":0.4,"Embedding Dimensions":384,"Max Tokens":514,"Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":"","MassiveScenarioClassification (ru)":""}
{"index":55,"Rank":82,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/ibm-granite\/granite-embedding-278m-multilingual\">granite-embedding-278m-multilingual<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"Embedding Dimensions":768,"Max Tokens":514,"Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":"","MassiveScenarioClassification (ru)":""}
{"index":56,"Rank":83,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/infly\/inf-retriever-v1\">inf-retriever-v1<\/a>","Model Size (Million Parameters)":7069,"Memory Usage (GB, fp32)":26.33,"Embedding Dimensions":3584,"Max Tokens":131072,"Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":"","MassiveScenarioClassification (ru)":""}
{"index":69,"Rank":84,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/jinaai\/jina-embeddings-v2-base-en\">jina-embeddings-v2-base-en<\/a>","Model Size (Million Parameters)":275,"Memory Usage (GB, fp32)":1.02,"Embedding Dimensions":768,"Max Tokens":8192,"Average":"","GeoreviewClassification":21.25,"HeadlineClassification":19.82,"InappropriatenessClassification":51.48,"KinopoiskClassification":"","RuReviewsClassification":33.7,"RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":7.62,"MassiveScenarioClassification (ru)":11.82}
{"index":72,"Rank":85,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/malenia1\/ternary-weight-embedding\">ternary-weight-embedding<\/a>","Model Size (Million Parameters)":99,"Memory Usage (GB, fp32)":0.37,"Embedding Dimensions":1792,"Max Tokens":512,"Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":25.53,"MassiveScenarioClassification (ru)":25.98}
{"index":74,"Rank":86,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/qinxianliu\/FAB-Ramy-v1\">FAB-Ramy-v1<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":514,"Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":"","MassiveScenarioClassification (ru)":""}
{"index":75,"Rank":87,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/qinxianliu\/FAE-v1\">FAE-v1<\/a>","Model Size (Million Parameters)":335,"Memory Usage (GB, fp32)":1.25,"Embedding Dimensions":1024,"Max Tokens":512,"Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":"","MassiveScenarioClassification (ru)":""}
{"index":76,"Rank":88,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/qinxianliu\/FUE-v1\">FUE-v1<\/a>","Model Size (Million Parameters)":109,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":514,"Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":"","MassiveScenarioClassification (ru)":""}
{"index":81,"Rank":89,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/allenai-specter\">allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":25.3,"MassiveScenarioClassification (ru)":28.16}
{"index":82,"Rank":90,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/average_word_embeddings_komninos\">average_word_embeddings_komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"Embedding Dimensions":300,"Max Tokens":"N\/A","Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":"","MassiveScenarioClassification (ru)":""}
{"index":83,"Rank":91,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/distiluse-base-multilingual-cased-v2\">distiluse-base-multilingual-cased-v2<\/a>","Model Size (Million Parameters)":135,"Memory Usage (GB, fp32)":0.5,"Embedding Dimensions":512,"Max Tokens":512,"Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":57.96,"MassiveScenarioClassification (ru)":65.41}
{"index":84,"Rank":92,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/gtr-t5-large\">gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":16.82,"MassiveScenarioClassification (ru)":25.85}
{"index":85,"Rank":93,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/gtr-t5-xl\">gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Embedding Dimensions":768,"Max Tokens":512,"Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":36.58,"MassiveScenarioClassification (ru)":43.44}
{"index":88,"Rank":94,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/sentence-t5-base\">sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"Embedding Dimensions":768,"Max Tokens":512,"Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":14.82,"MassiveScenarioClassification (ru)":20.69}
{"index":89,"Rank":95,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/sentence-t5-large\">sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"Embedding Dimensions":768,"Max Tokens":512,"Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":17.32,"MassiveScenarioClassification (ru)":27.47}
{"index":90,"Rank":96,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/sentence-t5-xl\">sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"Embedding Dimensions":768,"Max Tokens":512,"Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":33.46,"MassiveScenarioClassification (ru)":40.73}
{"index":94,"Rank":97,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/twadada\/llm_mse\">llm_mse<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","Embedding Dimensions":"","Max Tokens":"","Average":"","GeoreviewClassification":"","HeadlineClassification":"","InappropriatenessClassification":"","KinopoiskClassification":"","RuReviewsClassification":"","RuSciBenchGRNTIClassification":"","RuSciBenchOECDClassification":"","MassiveIntentClassification (ru)":2.65,"MassiveScenarioClassification (ru)":8.14}