Leaderboard of text-embedding models on nine Russian classification tasks. "Average" is the mean over the nine task scores; a dash (–) marks a value that is missing in the source data.

| Rank | Model | Size (M params) | Memory (GB, fp32) | Embedding dim. | Max tokens | Average | GeoreviewClassification | HeadlineClassification | InappropriatenessClassification | KinopoiskClassification | RuReviewsClassification | RuSciBenchGRNTIClassification | RuSciBenchOECDClassification | MassiveIntentClassification (ru) | MassiveScenarioClassification (ru) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 1 | [e5-mistral-7b-instruct](https://huggingface.co/intfloat/e5-mistral-7b-instruct) | 7111 | 26.49 | 4096 | 32768 | 69.11 | 56.72 | 87.02 | 70.36 | 68.35 | 70.57 | 66.05 | 52.11 | 73.74 | 77.1 |
| 2 | [GritLM-7B](https://huggingface.co/GritLM/GritLM-7B) | 7240 | 26.97 | – | 4096 | 67.48 | 53.47 | 85.66 | 65.29 | 64.25 | 68.58 | 64.56 | 51.2 | 76.01 | 78.28 |
| 3 | [multilingual-e5-large-instruct](https://huggingface.co/intfloat/multilingual-e5-large-instruct) | 560 | 2.09 | 1024 | 514 | 66.31 | 55.9 | 86.18 | 65.53 | 66.12 | 68.56 | 65.07 | 50.21 | 67.6 | 71.59 |
| 4 | [jina-embeddings-v3](https://huggingface.co/jinaai/jina-embeddings-v3) | 572 | 2.13 | 1024 | 8194 | 64.41 | 48.01 | 75.08 | 61.05 | 62.39 | 67.58 | 59.19 | 45.56 | 76.8 | 84.06 |
| 5 | [ru-en-RoSBERTa](https://huggingface.co/ai-forever/ru-en-RoSBERTa) | 404 | 1.5 | 1024 | 514 | 62.74 | 49.7 | 78.0 | 61.32 | 63.27 | 67.96 | 59.33 | 46.33 | 66.97 | 71.8 |
| 6 | [USER-bge-m3](https://huggingface.co/deepvk/USER-bge-m3) | 359 | 1.34 | 1024 | 8192 | 61.92 | 50.98 | 70.09 | 60.76 | 63.33 | 68.52 | 57.67 | 44.2 | 68.85 | 72.9 |
| 7 | [KaLM-embedding-multilingual-mini-v1](https://huggingface.co/HIT-TMG/KaLM-embedding-multilingual-mini-v1) | – | – | – | – | 61.71 | 47.69 | 83.46 | 61.32 | 59.04 | 66.09 | 61.41 | 48.67 | 60.84 | 66.9 |
| 8 | [multilingual-e5-large](https://huggingface.co/intfloat/multilingual-e5-large) | 560 | 2.09 | 1024 | 514 | 61.01 | 49.69 | 77.19 | 61.59 | 56.59 | 65.28 | 58.2 | 43.91 | 65.76 | 70.85 |
| 9 | [bge-m3](https://huggingface.co/BAAI/bge-m3) | 567 | 2.11 | 1024 | 8192 | 60.46 | 48.27 | 70.32 | 59.87 | 58.23 | 66.91 | 55.81 | 42.57 | 68.75 | 73.42 |
| 10 | [USER-base](https://huggingface.co/deepvk/USER-base) | 124 | 0.46 | 768 | 512 | 59.88 | 47.23 | 74.88 | 61.94 | 55.69 | 66.44 | 55.55 | 43.28 | 65.57 | 68.33 |
| 11 | [LaBSE-ru-turbo](https://huggingface.co/sergeyzh/LaBSE-ru-turbo) | 128 | 0.48 | 768 | 512 | 59.23 | 46.04 | 69.98 | 61.39 | 53.59 | 64.58 | 56.67 | 43.58 | 66.08 | 71.13 |
| 12 | [multilingual-e5-base](https://huggingface.co/intfloat/multilingual-e5-base) | 278 | 1.04 | 768 | 514 | 58.26 | 46.05 | 75.64 | 58.78 | 50.89 | 62.99 | 56.28 | 42.69 | 62.78 | 68.21 |
| 13 | [sbert_large_mt_nlu_ru](https://huggingface.co/ai-forever/sbert_large_mt_nlu_ru) | 427 | 1.59 | 1024 | 514 | 57.52 | 39.67 | 77.19 | 64.64 | 50.33 | 58.29 | 54.19 | 43.8 | 61.42 | 68.13 |
| 14 | [sbert_large_nlu_ru](https://huggingface.co/ai-forever/sbert_large_nlu_ru) | 427 | 1.59 | 1024 | 512 | 57.24 | 39.97 | 79.26 | 62.52 | 49.51 | 58.27 | 53.9 | 43.04 | 61.09 | 67.6 |
| 15 | [paraphrase-multilingual-mpnet-base-v2](https://huggingface.co/sentence-transformers/paraphrase-multilingual-mpnet-base-v2) | 278 | 1.04 | 768 | 514 | 56.88 | 42.33 | 70.35 | 59.32 | 44.31 | 62.33 | 56.01 | 44.14 | 63.23 | 69.92 |
| 16 | [multilingual-e5-small (Marqo)](https://huggingface.co/Marqo/multilingual-e5-small) | 118 | 0.44 | 384 | 512 | 56.45 | 44.66 | 73.94 | 59.16 | 49.96 | 61.18 | 54.99 | 41.72 | 58.65 | 63.77 |
| 17 | [multilingual-e5-small (intfloat)](https://huggingface.co/intfloat/multilingual-e5-small) | 118 | 0.44 | 384 | 512 | 56.44 | 44.66 | 73.94 | 59.16 | 49.96 | 61.18 | 54.99 | 41.72 | 58.43 | 63.89 |
| 18 | [deberta-v1-base](https://huggingface.co/deepvk/deberta-v1-base) | 124 | 0.46 | 768 | 512 | 56.18 | 40.19 | 78.75 | 61.33 | 48.78 | 55.66 | 53.53 | 41.34 | 61.32 | 64.71 |
| 19 | [distilrubert-small-cased-conversational](https://huggingface.co/DeepPavlov/distilrubert-small-cased-conversational) | 106 | 0.39 | 768 | 512 | 55.15 | 38.95 | 75.59 | 60.68 | 49.67 | 54.05 | 48.53 | 37.65 | 63.12 | 68.08 |
| 20 | [rubert-tiny-turbo](https://huggingface.co/sergeyzh/rubert-tiny-turbo) | 29 | 0.11 | 312 | 2048 | 55.01 | 41.36 | 68.9 | 59.11 | 50.47 | 60.66 | 52.93 | 40.79 | 57.98 | 62.9 |
| 21 | [LaBSE-en-ru](https://huggingface.co/cointegrated/LaBSE-en-ru) | 129 | 0.48 | 768 | 512 | 54.98 | 40.89 | 68.75 | 58.48 | 49.85 | 58.01 | 52.8 | 40.36 | 60.53 | 65.15 |
| 22 | [LaBSE](https://huggingface.co/sentence-transformers/LaBSE) | 471 | 1.75 | 768 | 512 | 54.71 | 40.86 | 68.75 | 58.52 | 46.77 | 58.01 | 53.04 | 40.48 | 60.67 | 65.25 |
| 23 | [paraphrase-multilingual-MiniLM-L12-v2](https://huggingface.co/sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2) | 118 | 0.44 | 384 | 512 | 53.77 | 38.24 | 68.3 | 58.18 | 41.45 | 58.88 | 53.19 | 41.41 | 59.06 | 65.25 |
| 24 | [rubert-tiny2](https://huggingface.co/cointegrated/rubert-tiny2) | 29 | 0.11 | 2048 | 514 | 52.17 | 39.64 | 74.19 | 58.57 | 49.06 | 56.99 | 45.63 | 35.48 | 50.83 | 59.15 |
| 25 | [rubert-base-cased](https://huggingface.co/DeepPavlov/rubert-base-cased) | 180 | 0.67 | 768 | 512 | 51.6 | 37.22 | 75.23 | 57.34 | 49.91 | 50.74 | 48.03 | 36.13 | 53.02 | 56.79 |
| 26 | [rubert-base-cased-sentence](https://huggingface.co/DeepPavlov/rubert-base-cased-sentence) | 180 | 0.67 | 768 | 512 | 51.27 | 38.05 | 67.64 | 58.27 | 45.86 | 58.34 | 52.18 | 40.11 | 49.1 | 51.91 |
| 27 | [rubert-tiny](https://huggingface.co/cointegrated/rubert-tiny) | 12 | 0.04 | 512 | 514 | 44.55 | 33.45 | 57.65 | 54.5 | 41.36 | 49.56 | 35.71 | 26.51 | 50.1 | 52.15 |
| 28 | [all-mpnet-base-v2](https://huggingface.co/sentence-transformers/all-mpnet-base-v2) | 110 | 0.41 | 768 | 514 | 28.82 | 25.93 | 28.53 | 51.82 | 34.18 | 42.33 | 13.29 | 10.62 | 23.98 | 28.71 |
| 29 | [all-MiniLM-L6-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2) | 23 | 0.09 | 384 | 512 | 28.75 | 27.08 | 27.77 | 51.73 | 33.93 | 41.79 | 10.08 | 8.3 | 27.58 | 30.46 |
| 30 | [all-MiniLM-L12-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L12-v2) | 33 | 0.12 | 384 | 512 | 28.15 | 23.49 | 28.49 | 50.85 | 34.17 | 42.49 | 10.49 | 8.31 | 26.33 | 28.75 |
| 31 | [LASER2](https://github.com/facebookresearch/LASER) | 43 | 0.16 | 1024 | N/A | – | – | – | – | – | – | – | – | 44.29 | 51.84 |
| 32 | [gte-multilingual-base](https://huggingface.co/Alibaba-NLP/gte-multilingual-base) | 305 | 1.14 | 768 | 8192 | – | – | – | – | – | – | – | – | 67.46 | 71.65 |
| 33 | [winberta-base](https://huggingface.co/ClayAtlas/winberta-base) | – | – | – | – | – | – | – | – | – | – | – | – | 39.69 | 48.66 |
| 34 | [winberta-large](https://huggingface.co/ClayAtlas/winberta-large) | – | – | – | – | – | – | – | – | – | – | – | – | 11.27 | 16.71 |
| 35 | [windberta-large](https://huggingface.co/ClayAtlas/windberta-large) | – | – | – | – | – | – | – | – | – | – | – | – | 10.4 | 18.96 |
| 36 | [gte-multi](https://huggingface.co/Huy227/gte-multi) | 305 | 1.14 | 768 | 8192 | – | – | – | – | – | – | – | – | 67.46 | 71.65 |
| 37 | [MUG-B-1.6](https://huggingface.co/Labib11/MUG-B-1.6) | 335 | 1.25 | 1024 | 512 | – | – | – | – | – | – | – | – | 31.82 | 37.73 |
| 39 | [SGPT-125M-weightedmean-msmarco-specb-bitfit](https://huggingface.co/Muennighoff/SGPT-125M-weightedmean-msmarco-specb-bitfit) | 138 | 0.51 | 768 | 2048 | – | – | – | – | – | – | – | – | 35.97 | 32.76 |
| 40 | [SGPT-125M-weightedmean-nli-bitfit](https://huggingface.co/Muennighoff/SGPT-125M-weightedmean-nli-bitfit) | 138 | 0.51 | 768 | 2048 | – | – | – | – | – | – | – | – | 37.46 | 35.95 |
| 41 | [multilingual-e5-large-instruct (Nextcloud-AI)](https://huggingface.co/Nextcloud-AI/multilingual-e5-large-instruct) | 560 | 2.09 | 1024 | 514 | – | – | – | – | – | – | – | – | 74.16 | 77.71 |
| 42 | [Titan-text-embeddings-v2](https://huggingface.co/amazon/Titan-text-embeddings-v2) | – | – | – | – | – | – | – | – | – | – | – | – | 60.85 | 65.42 |
| 43 | [sgpt-bloom-7b1-msmarco](https://huggingface.co/bigscience/sgpt-bloom-7b1-msmarco) | 7068 | 26.33 | 4096 | 2048 | – | – | – | – | – | – | – | – | 58.32 | 61.6 |
| 44 | [slx-v0.1](https://huggingface.co/brahmairesearch/slx-v0.1) | 23 | 0.08 | 384 | 512 | – | – | – | – | – | – | – | – | 27.14 | 29.95 |
| 45 | [SONAR](https://huggingface.co/facebook/SONAR) | – | – | – | – | – | – | – | – | – | – | – | – | 62.63 | 67.96 |
| 46 | [udever-bloom-1b1](https://huggingface.co/izhx/udever-bloom-1b1) | 1065 | 3.97 | 1536 | 2048 | – | – | – | – | – | – | – | – | 52.99 | 54.26 |
| 47 | [udever-bloom-560m](https://huggingface.co/izhx/udever-bloom-560m) | 559 | 2.08 | 1024 | 2048 | – | – | – | – | – | – | – | – | 47.61 | 49.94 |
| 48 | [allenai-specter](https://huggingface.co/sentence-transformers/allenai-specter) | 110 | 0.41 | 768 | 512 | – | – | – | – | – | – | – | – | 25.3 | 28.16 |
| 49 | [average_word_embeddings_komninos](https://huggingface.co/sentence-transformers/average_word_embeddings_komninos) | 134 | 0.5 | 300 | N/A | – | – | – | – | – | – | – | – | – | – |
| 50 | [distiluse-base-multilingual-cased-v2](https://huggingface.co/sentence-transformers/distiluse-base-multilingual-cased-v2) | 135 | 0.5 | 512 | 512 | – | – | – | – | – | – | – | – | 57.96 | 65.41 |
| 51 | [gtr-t5-large](https://huggingface.co/sentence-transformers/gtr-t5-large) | 168 | 0.63 | 768 | 512 | – | – | – | – | – | – | – | – | 16.82 | 25.85 |
| 52 | [gtr-t5-xl](https://huggingface.co/sentence-transformers/gtr-t5-xl) | 1240 | 4.62 | 768 | 512 | – | – | – | – | – | – | – | – | 36.58 | 43.44 |
| 53 | [sentence-t5-base](https://huggingface.co/sentence-transformers/sentence-t5-base) | 110 | 0.41 | 768 | 512 | – | – | – | – | – | – | – | – | 14.82 | 20.69 |
| 54 | [sentence-t5-large](https://huggingface.co/sentence-transformers/sentence-t5-large) | 168 | 0.63 | 768 | 512 | – | – | – | – | – | – | – | – | 17.32 | 27.47 |
| 55 | [sentence-t5-xl](https://huggingface.co/sentence-transformers/sentence-t5-xl) | 1240 | 4.62 | 768 | 512 | – | – | – | – | – | – | – | – | 33.46 | 40.73 |
| 56 | [text2vec-base-multilingual](https://huggingface.co/shibing624/text2vec-base-multilingual) | 118 | 0.44 | 384 | 512 | – | – | – | – | – | – | – | – | 58.02 | 62.31 |
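The task columns above match classification tasks from the MTEB benchmark suite. Assuming these scores come from an MTEB-style evaluation, the snippet below is a minimal sketch of how one of the listed models could be scored on the same tasks with the `mteb` and `sentence-transformers` packages; the model name and task names are taken from the table, and the exact `mteb` API (task selection, language filtering) may differ between package versions.

```python
# Minimal sketch (assumed setup, not the leaderboard's own harness): score one
# model from the table on the Russian classification tasks in the columns above.
# Requires: pip install mteb sentence-transformers
from mteb import MTEB
from sentence_transformers import SentenceTransformer

# Any Hugging Face model from the "Model" column can be substituted here.
model = SentenceTransformer("intfloat/multilingual-e5-large")

# Task names correspond to the score columns of the table above. The two
# Massive* tasks are multilingual; the "(ru)" columns report their Russian
# split only (language-filtering options differ between mteb versions).
tasks = [
    "GeoreviewClassification",
    "HeadlineClassification",
    "InappropriatenessClassification",
    "KinopoiskClassification",
    "RuReviewsClassification",
    "RuSciBenchGRNTIClassification",
    "RuSciBenchOECDClassification",
    "MassiveIntentClassification",
    "MassiveScenarioClassification",
]

evaluation = MTEB(tasks=tasks)
evaluation.run(model, output_folder="results/multilingual-e5-large")
```

Per-task results are written as JSON files under the chosen output folder, from which a table like the one above can be assembled.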