{"Rank":1,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/cloud.google.com\/vertex-ai\/generative-ai\/docs\/embeddings\/get-text-embeddings#latest_models\">google-gecko.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"SummEval":32.63}
{"Rank":2,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/cloud.google.com\/vertex-ai\/generative-ai\/docs\/embeddings\/get-text-embeddings#latest_models\">google-gecko-256.text-embedding-preview-0409<\/a>","Model Size (Million Parameters)":1200,"Memory Usage (GB, fp32)":4.47,"SummEval":32.36}
{"Rank":3,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-mpnet-base-v2\">paraphrase-multilingual-mpnet-base-v2<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"SummEval":31.57}
{"Rank":4,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/Alibaba-NLP\/gte-Qwen1.5-7B-instruct\">gte-Qwen1.5-7B-instruct<\/a>","Model Size (Million Parameters)":7099,"Memory Usage (GB, fp32)":26.45,"SummEval":31.46}
{"Rank":5,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/McGill-NLP\/LLM2Vec-Meta-Llama-3-8B-Instruct-mntp-unsup-simcse\">LLM2Vec-Meta-Llama-3-unsupervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"SummEval":31.45}
{"Rank":6,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/sentence-t5-base\">sentence-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":31.39}
{"Rank":7,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/McGill-NLP\/LLM2Vec-Llama-2-7b-chat-hf-mntp-unsup-simcse\">LLM2Vec-Llama-2-unsupervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"SummEval":31.38}
{"Rank":8,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/McGill-NLP\/LLM2Vec-Sheared-LLaMA-mntp-unsup-simcse\">LLM2Vec-Sheared-Llama-unsupervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"SummEval":31.23}
{"Rank":9,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/princeton-nlp\/sup-simcse-bert-base-uncased\">sup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":31.17}
{"Rank":10,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/princeton-nlp\/unsup-simcse-bert-base-uncased\">unsup-simcse-bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":31.15}
{"Rank":11,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/openai.com\/blog\/new-embedding-models-and-api-updates\">text-embedding-3-small<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":31.12}
{"Rank":12,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/LaBSE\">LaBSE<\/a>","Model Size (Million Parameters)":471,"Memory Usage (GB, fp32)":1.75,"SummEval":31.05}
{"Rank":13,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/www.elastic.co\/guide\/en\/machine-learning\/current\/ml-nlp-elser.html\">elser-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":31.03}
{"Rank":14,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/docs.voyageai.com\/embeddings\/\">voyage-lite-02-instruct<\/a>","Model Size (Million Parameters)":1220,"Memory Usage (GB, fp32)":4.54,"SummEval":31.01}
{"Rank":15,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/docs.voyageai.com\/embeddings\/\">voyage-lite-01-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.97}
{"Rank":16,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/McGill-NLP\/LLM2Vec-Meta-Llama-3-8B-Instruct-mntp-supervised\">LLM2Vec-Meta-Llama-3-supervised<\/a>","Model Size (Million Parameters)":7505,"Memory Usage (GB, fp32)":27.96,"SummEval":30.94}
{"Rank":17,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/docs.voyageai.com\/embeddings\/\">voyage-large-2-instruct<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.84}
{"Rank":18,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L6-v2\">all-MiniLM-L6-v2<\/a>","Model Size (Million Parameters)":23,"Memory Usage (GB, fp32)":0.09,"SummEval":30.81}
{"Rank":19,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/openai.com\/blog\/new-and-improved-embedding-model\">text-embedding-ada-002<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":30.8}
{"Rank":20,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/paraphrase-multilingual-MiniLM-L12-v2\">paraphrase-multilingual-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"SummEval":30.67}
{"Rank":21,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/gtr-t5-xxl\">gtr-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"SummEval":30.64}
{"Rank":22,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/average_word_embeddings_komninos\">komninos<\/a>","Model Size (Million Parameters)":134,"Memory Usage (GB, fp32)":0.5,"SummEval":30.49}
{"Rank":23,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/nomic-ai\/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-512<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEval":30.47}
{"Rank":24,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/nthakur\/contriever-base-msmarco\">contriever-base-msmarco<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":30.36}
{"Rank":25,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/GritLM\/GritLM-7B\">GritLM-7B<\/a>","Model Size (Million Parameters)":7240,"Memory Usage (GB, fp32)":26.97,"SummEval":30.26}
{"Rank":26,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-base\">multilingual-e5-base<\/a>","Model Size (Million Parameters)":278,"Memory Usage (GB, fp32)":1.04,"SummEval":30.23}
{"Rank":27,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/gtr-t5-xl\">gtr-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"SummEval":30.21}
{"Rank":28,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/McGill-NLP\/LLM2Vec-Mistral-7B-Instruct-v2-mntp-unsup-simcse\">LLM2Vec-Mistral-unsupervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"SummEval":30.19}
{"Rank":29,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/sentence-t5-xxl\">sentence-t5-xxl<\/a>","Model Size (Million Parameters)":4865,"Memory Usage (GB, fp32)":18.12,"SummEval":30.08}
{"Rank":30,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/nomic-ai\/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-256<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEval":30.05}
{"Rank":31,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-small\">multilingual-e5-small<\/a>","Model Size (Million Parameters)":118,"Memory Usage (GB, fp32)":0.44,"SummEval":30.04}
{"Rank":32,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/McGill-NLP\/LLM2Vec-Sheared-LLaMA-mntp-supervised\">LLM2Vec-Sheared-Llama-supervised<\/a>","Model Size (Million Parameters)":1280,"Memory Usage (GB, fp32)":4.77,"SummEval":30.01}
{"Rank":33,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/McGill-NLP\/LLM2Vec-Mistral-7B-Instruct-v2-mntp-supervised\">LLM2Vec-Mistral-supervised<\/a>","Model Size (Million Parameters)":7111,"Memory Usage (GB, fp32)":26.49,"SummEval":29.96}
{"Rank":34,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/openai.com\/blog\/new-embedding-models-and-api-updates\">text-embedding-3-large<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.92}
{"Rank":35,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/openai.com\/blog\/new-embedding-models-and-api-updates\">text-embedding-3-large-256<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":29.92}
{"Rank":36,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/sentence-t5-xl\">sentence-t5-xl<\/a>","Model Size (Million Parameters)":1240,"Memory Usage (GB, fp32)":4.62,"SummEval":29.91}
{"Rank":37,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/bert-base-uncased\">bert-base-uncased<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":29.82}
{"Rank":38,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/gtr-t5-base\">gtr-t5-base<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":29.67}
{"Rank":39,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/intfloat\/multilingual-e5-large\">multilingual-e5-large<\/a>","Model Size (Million Parameters)":560,"Memory Usage (GB, fp32)":2.09,"SummEval":29.65}
{"Rank":40,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/sentence-t5-large\">sentence-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"SummEval":29.64}
{"Rank":41,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/nomic-ai\/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-128<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEval":29.59}
{"Rank":42,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/msmarco-bert-co-condensor\">msmarco-bert-co-condensor<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":29.5}
{"Rank":43,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/gtr-t5-large\">gtr-t5-large<\/a>","Model Size (Million Parameters)":168,"Memory Usage (GB, fp32)":0.63,"SummEval":29.5}
{"Rank":44,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/average_word_embeddings_glove.6B.300d\">glove.6B.300d<\/a>","Model Size (Million Parameters)":120,"Memory Usage (GB, fp32)":0.45,"SummEval":28.87}
{"Rank":45,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/McGill-NLP\/LLM2Vec-Llama-2-7b-chat-hf-mntp-supervised\">LLM2Vec-Llama-2-supervised<\/a>","Model Size (Million Parameters)":6607,"Memory Usage (GB, fp32)":24.61,"SummEval":28.49}
{"Rank":46,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/nomic-ai\/nomic-embed-text-v1.5\">nomic-embed-text-v1.5-64<\/a>","Model Size (Million Parameters)":138,"Memory Usage (GB, fp32)":0.51,"SummEval":28.41}
{"Rank":47,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-MiniLM-L12-v2\">all-MiniLM-L12-v2<\/a>","Model Size (Million Parameters)":33,"Memory Usage (GB, fp32)":0.12,"SummEval":27.9}
{"Rank":48,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/allenai-specter\">allenai-specter<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":27.66}
{"Rank":49,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/huggingface.co\/sentence-transformers\/all-mpnet-base-v2\">all-mpnet-base-v2<\/a>","Model Size (Million Parameters)":110,"Memory Usage (GB, fp32)":0.41,"SummEval":27.49}
{"Rank":50,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/openai.com\/blog\/introducing-text-and-code-embeddings\">text-similarity-ada-001<\/a>","Model Size (Million Parameters)":"","Memory Usage (GB, fp32)":"","SummEval":26.94}
{"Rank":51,"Model":"<a target=\"_blank\" style=\"text-decoration: underline\" href=\"https:\/\/github.com\/facebookresearch\/LASER\">LASER2<\/a>","Model Size (Million Parameters)":43,"Memory Usage (GB, fp32)":0.16,"SummEval":26.8}