{ "monot5-3b-msmarco-10k": { "BitextMining": { "f1": [ { "Model": "monot5-3b-msmarco-10k" } ] }, "Classification": { "accuracy": [ { "Model": "monot5-3b-msmarco-10k" } ] }, "Clustering": { "v_measure": [ { "Model": "monot5-3b-msmarco-10k" } ] }, "PairClassification": { "ap": [ { "Model": "monot5-3b-msmarco-10k" } ] }, "Reranking": { "map": [ { "Model": "monot5-3b-msmarco-10k" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "monot5-3b-msmarco-10k" } ] }, "STS": { "spearman": [ { "Model": "monot5-3b-msmarco-10k" } ] }, "Summarization": { "spearman": [ { "Model": "monot5-3b-msmarco-10k" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "monot5-3b-msmarco-10k", "Core17InstructionRetrieval": 1.84, "News21InstructionRetrieval": 1.78, "Robust04InstructionRetrieval": 3.96 } ] } }, "LLM2Vec-Mistral-unsupervised": { "BitextMining": { "f1": [ { "Model": "LLM2Vec-Mistral-unsupervised" } ] }, "Classification": { "accuracy": [ { "Model": "LLM2Vec-Mistral-unsupervised", "AmazonCounterfactualClassification (en)": 76.94, "AmazonPolarityClassification": 85.29, "AmazonReviewsClassification (en)": 47.09, "Banking77Classification": 86.16, "EmotionClassification": 48.88, "ImdbClassification": 77.95, "MTOPDomainClassification (en)": 95.48, "MTOPIntentClassification (en)": 82.84, "MassiveIntentClassification (en)": 76.65, "MassiveScenarioClassification (en)": 79.99, "ToxicConversationsClassification": 70.71, "TweetSentimentExtractionClassification": 60.9 } ] }, "Clustering": { "v_measure": [ { "Model": "LLM2Vec-Mistral-unsupervised", "ArxivClusteringP2P": 47.56, "ArxivClusteringS2S": 39.92, "BiorxivClusteringP2P": 36.14, "BiorxivClusteringS2S": 30.26, "MedrxivClusteringP2P": 30.11, "MedrxivClusteringS2S": 26.93, "RedditClustering": 41.83, "RedditClusteringP2P": 62.08, "StackExchangeClustering": 67.34, "StackExchangeClusteringP2P": 34.5, "TwentyNewsgroupsClustering": 30.26 } ] }, "PairClassification": { "ap": [ { "Model": "LLM2Vec-Mistral-unsupervised", "SprintDuplicateQuestions": 91.3, "TwitterSemEval2015": 68.76, "TwitterURLCorpus": 82.76 } ] }, "Reranking": { "map": [ { "Model": "LLM2Vec-Mistral-unsupervised", "AskUbuntuDupQuestions": 58.6, "MindSmallReranking": 29.73, "SciDocsRR": 77.81, "StackOverflowDupQuestions": 49.8 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "LLM2Vec-Mistral-unsupervised", "ArguAna": 51.0, "CQADupstackRetrieval": 33.37, "ClimateFEVER": 22.97, "DBPedia": 25.48, "FEVER": 45.11, "FiQA2018": 27.24, "HotpotQA": 54.54, "MSMARCO": 19.13, "NFCorpus": 27.16, "NQ": 34.16, "QuoraRetrieval": 84.4, "SCIDOCS": 15.35, "SciFact": 68.68, "TRECCOVID": 55.67, "Touche2020": 6.54 } ] }, "STS": { "spearman": [ { "Model": "LLM2Vec-Mistral-unsupervised", "BIOSSES": 83.29, "SICK-R": 75.55, "STS12": 67.65, "STS13": 83.9, "STS14": 76.97, "STS15": 83.8, "STS16": 81.91, "STS17 (en-en)": 85.58, "STS22 (en)": 65.93, "STSBenchmark": 80.42 } ] }, "Summarization": { "spearman": [ { "Model": "LLM2Vec-Mistral-unsupervised", "SummEval": 30.19 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "LLM2Vec-Mistral-unsupervised" } ] } }, "bge-small-en-v1.5-instruct": { "BitextMining": { "f1": [ { "Model": "bge-small-en-v1.5-instruct" } ] }, "Classification": { "accuracy": [ { "Model": "bge-small-en-v1.5-instruct" } ] }, "Clustering": { "v_measure": [ { "Model": "bge-small-en-v1.5-instruct" } ] }, "PairClassification": { "ap": [ { "Model": "bge-small-en-v1.5-instruct" } ] }, "Reranking": { "map": [ { "Model": "bge-small-en-v1.5-instruct" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "bge-small-en-v1.5-instruct", 
"ARCChallenge": 7.72, "AlphaNLI": 1.26, "HellaSwag": 23.41, "PIQA": 20.79, "Quail": 2.01, "RARbCode": 41.52, "RARbMath": 46.5, "SIQA": 0.98, "SpartQA": 2.86, "TempReasonL1": 1.27, "TempReasonL2Fact": 16.72, "TempReasonL2Pure": 1.1, "TempReasonL3Fact": 12.81, "TempReasonL3Pure": 4.63, "WinoGrande": 5.35 } ] }, "STS": { "spearman": [ { "Model": "bge-small-en-v1.5-instruct" } ] }, "Summarization": { "spearman": [ { "Model": "bge-small-en-v1.5-instruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "bge-small-en-v1.5-instruct" } ] } }, "m3e-large": { "BitextMining": { "f1": [ { "Model": "m3e-large" } ] }, "Classification": { "accuracy": [ { "Model": "m3e-large", "AmazonReviewsClassification (zh)": 44.44, "IFlyTek": 43.96, "JDReview": 86.92, "MassiveIntentClassification (zh-CN)": 67.23, "MassiveScenarioClassification (zh-CN)": 74.88, "MultilingualSentiment": 72.47, "OnlineShopping": 89.59, "TNews": 48.26, "Waimai": 86.08 } ] }, "Clustering": { "v_measure": [ { "Model": "m3e-large", "CLSClusteringP2P": 38.6, "CLSClusteringS2S": 38.02, "ThuNewsClusteringP2P": 60.39, "ThuNewsClusteringS2S": 58.51 } ] }, "PairClassification": { "ap": [ { "Model": "m3e-large", "Cmnli": 69.27, "Ocnli": 59.33 } ] }, "Reranking": { "map": [ { "Model": "m3e-large", "CMedQAv1": 77.76, "CMedQAv2": 78.27, "MMarcoReranking": 16.46, "T2Reranking": 66.13 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "m3e-large", "CmedqaRetrieval": 30.73, "CovidRetrieval": 61.33, "DuRetrieval": 74.69, "EcomRetrieval": 45.18, "MMarcoRetrieval": 61.06, "MedicalRetrieval": 48.66, "T2Retrieval": 72.36, "VideoRetrieval": 44.02 } ] }, "STS": { "spearman": [ { "Model": "m3e-large", "AFQMC": 36.53, "ATEC": 41.8, "BQ": 65.2, "LCQMC": 74.2, "PAWSX": 15.95, "QBQTC": 32.65, "STS22 (zh)": 62.91, "STSB": 74.16 } ] }, "Summarization": { "spearman": [ { "Model": "m3e-large" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "m3e-large" } ] } }, "nomic-embed-text-v1.5-128": { "BitextMining": { "f1": [ { "Model": "nomic-embed-text-v1.5-128" } ] }, "Classification": { "accuracy": [ { "Model": "nomic-embed-text-v1.5-128", "AmazonCounterfactualClassification (en)": 69.78, "AmazonPolarityClassification": 88.74, "AmazonReviewsClassification (en)": 43.11, "Banking77Classification": 82.78, "EmotionClassification": 42.92, "ImdbClassification": 80.87, "MTOPDomainClassification (en)": 89.61, "MTOPIntentClassification (en)": 68.9, "MassiveIntentClassification (en)": 69.34, "MassiveScenarioClassification (en)": 74.21, "ToxicConversationsClassification": 68.16, "TweetSentimentExtractionClassification": 57.99 } ] }, "Clustering": { "v_measure": [ { "Model": "nomic-embed-text-v1.5-128", "ArxivClusteringP2P": 43.87, "ArxivClusteringS2S": 34.57, "BiorxivClusteringP2P": 36.79, "BiorxivClusteringS2S": 30.68, "MedrxivClusteringP2P": 34.09, "MedrxivClusteringS2S": 31.3, "RedditClustering": 53.31, "RedditClusteringP2P": 58.96, "StackExchangeClustering": 59.92, "StackExchangeClusteringP2P": 33.88, "TwentyNewsgroupsClustering": 47.29 } ] }, "PairClassification": { "ap": [ { "Model": "nomic-embed-text-v1.5-128", "SprintDuplicateQuestions": 91.45, "TwitterSemEval2015": 73.23, "TwitterURLCorpus": 85.93 } ] }, "Reranking": { "map": [ { "Model": "nomic-embed-text-v1.5-128", "AskUbuntuDupQuestions": 61.16, "MindSmallReranking": 30.02, "SciDocsRR": 78.05, "StackOverflowDupQuestions": 49.0 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "nomic-embed-text-v1.5-128", "ArguAna": 43.4, "CQADupstackRetrieval": 34.67, "ClimateFEVER": 36.52, "DBPedia": 36.22, "FEVER": 80.48, 
"FiQA2018": 32.08, "HotpotQA": 60.09, "MSMARCO": 39.99, "NFCorpus": 30.72, "NQ": 53.62, "QuoraRetrieval": 87.07, "SCIDOCS": 15.56, "SciFact": 64.28, "TRECCOVID": 74.58, "Touche2020": 26.99 } ] }, "STS": { "spearman": [ { "Model": "nomic-embed-text-v1.5-128", "BIOSSES": 80.19, "SICK-R": 79.09, "STS12": 77.49, "STS13": 85.62, "STS14": 80.5, "STS15": 85.84, "STS16": 83.9, "STS17 (en-en)": 86.27, "STS22 (en)": 64.24, "STSBenchmark": 84.28 } ] }, "Summarization": { "spearman": [ { "Model": "nomic-embed-text-v1.5-128", "SummEval": 29.59 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "nomic-embed-text-v1.5-128" } ] } }, "dragon-plus-instruct": { "BitextMining": { "f1": [ { "Model": "dragon-plus-instruct" } ] }, "Classification": { "accuracy": [ { "Model": "dragon-plus-instruct" } ] }, "Clustering": { "v_measure": [ { "Model": "dragon-plus-instruct" } ] }, "PairClassification": { "ap": [ { "Model": "dragon-plus-instruct" } ] }, "Reranking": { "map": [ { "Model": "dragon-plus-instruct" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "dragon-plus-instruct", "ARCChallenge": 8.24, "AlphaNLI": 25.18, "HellaSwag": 24.06, "PIQA": 26.35, "Quail": 4.2, "RARbCode": 12.84, "RARbMath": 36.15, "SIQA": 1.75, "SpartQA": 10.82, "TempReasonL1": 1.54, "TempReasonL2Fact": 16.11, "TempReasonL2Pure": 0.57, "TempReasonL3Fact": 14.81, "TempReasonL3Pure": 7.46, "WinoGrande": 60.84 } ] }, "STS": { "spearman": [ { "Model": "dragon-plus-instruct" } ] }, "Summarization": { "spearman": [ { "Model": "dragon-plus-instruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "dragon-plus-instruct" } ] } }, "st-polish-paraphrase-from-mpnet": { "BitextMining": { "f1": [ { "Model": "st-polish-paraphrase-from-mpnet" } ] }, "Classification": { "accuracy": [ { "Model": "st-polish-paraphrase-from-mpnet", "AllegroReviews": 34.55, "CBD": 67.48, "MassiveIntentClassification (pl)": 65.93, "MassiveScenarioClassification (pl)": 71.85, "PAC": 63.25, "PolEmo2.0-IN": 68.37, "PolEmo2.0-OUT": 30.99 } ] }, "Clustering": { "v_measure": [ { "Model": "st-polish-paraphrase-from-mpnet", "8TagsClustering": 33.15 } ] }, "PairClassification": { "ap": [ { "Model": "st-polish-paraphrase-from-mpnet", "CDSC-E": 75.06, "PPC": 93.49, "PSC": 99.05, "SICK-E-PL": 80.56 } ] }, "Reranking": { "map": [ { "Model": "st-polish-paraphrase-from-mpnet" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "st-polish-paraphrase-from-mpnet", "ArguAna-PL": 51.87, "DBPedia-PL": 24.59, "FiQA-PL": 22.27, "HotpotQA-PL": 32.11, "MSMARCO-PL": 17.91, "NFCorpus-PL": 24.05, "NQ-PL": 23.54, "Quora-PL": 81.49, "SCIDOCS-PL": 13.23, "SciFact-PL": 52.51, "TRECCOVID-PL": 35.23 } ] }, "STS": { "spearman": [ { "Model": "st-polish-paraphrase-from-mpnet", "CDSC-R": 88.55, "SICK-R-PL": 76.18, "STS22 (pl)": 37.34 } ] }, "Summarization": { "spearman": [ { "Model": "st-polish-paraphrase-from-mpnet" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "st-polish-paraphrase-from-mpnet" } ] } }, "USER-bge-m3": { "BitextMining": { "f1": [ { "Model": "USER-bge-m3", "Tatoeba (rus-Cyrl_eng-Latn)": 93.52 } ] }, "Classification": { "accuracy": [ { "Model": "USER-bge-m3", "GeoreviewClassification (rus-Cyrl)": 50.98, "HeadlineClassification (rus-Cyrl)": 70.09, "InappropriatenessClassification (rus-Cyrl)": 60.76, "KinopoiskClassification (rus-Cyrl)": 63.33, "MassiveIntentClassification (rus-Cyrl)": 68.85, "MassiveScenarioClassification (rus-Cyrl)": 72.9, "RuReviewsClassification (rus-Cyrl)": 68.52, "RuSciBenchGRNTIClassification (rus-Cyrl)": 57.67, "RuSciBenchOECDClassification (rus-Cyrl)": 44.2 } ] 
}, "Clustering": { "v_measure": [ { "Model": "USER-bge-m3", "GeoreviewClusteringP2P (rus-Cyrl)": 62.79, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 53.11, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 44.93 } ] }, "PairClassification": { "ap": [ { "Model": "USER-bge-m3", "OpusparcusPC (rus-Cyrl)": 90.73, "TERRa (rus-Cyrl)": 64.99 } ] }, "Reranking": { "map": [ { "Model": "USER-bge-m3", "RuBQReranking (rus-Cyrl)": 73.08 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "USER-bge-m3", "RiaNewsRetrieval (rus-Cyrl)": 83.53, "RuBQRetrieval (rus-Cyrl)": 70.03 } ] }, "STS": { "spearman": [ { "Model": "USER-bge-m3", "RUParaPhraserSTS (rus-Cyrl)": 76.36, "RuSTSBenchmarkSTS (rus-Cyrl)": 83.35, "STS22 (rus-Cyrl)": 66.42, "STSBenchmarkMultilingualSTS (rus-Cyrl)": 82.96 } ] }, "Summarization": { "spearman": [ { "Model": "USER-bge-m3" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "USER-bge-m3" } ] } }, "Cohere-embed-multilingual-v3.0": { "BitextMining": { "f1": [ { "Model": "Cohere-embed-multilingual-v3.0" } ] }, "Classification": { "accuracy": [ { "Model": "Cohere-embed-multilingual-v3.0", "AmazonReviewsClassification (fr)": 41.89, "MTOPDomainClassification (fr)": 86.23, "MTOPIntentClassification (fr)": 61.07, "MasakhaNEWSClassification (fra)": 83.06, "MassiveIntentClassification (fr)": 62.94, "MassiveScenarioClassification (fr)": 67.29 } ] }, "Clustering": { "v_measure": [ { "Model": "Cohere-embed-multilingual-v3.0", "AlloProfClusteringP2P": 63.53, "AlloProfClusteringS2S": 36.18, "HALClusteringS2S": 19.9, "MLSUMClusteringP2P": 45.08, "MLSUMClusteringS2S": 34.75, "MasakhaNEWSClusteringP2P (fra)": 53.18, "MasakhaNEWSClusteringS2S (fra)": 32.31 } ] }, "PairClassification": { "ap": [ { "Model": "Cohere-embed-multilingual-v3.0", "OpusparcusPC (fr)": 94.08, "PawsXPairClassification (fr)": 61.26 } ] }, "Reranking": { "map": [ { "Model": "Cohere-embed-multilingual-v3.0", "AlloprofReranking": 51.01, "SyntecReranking": 85.72 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "Cohere-embed-multilingual-v3.0", "AlloprofRetrieval": 38.36, "BSARDRetrieval": 0.14, "MintakaRetrieval (fr)": 25.44, "SyntecRetrieval": 79.27, "XPQARetrieval (fr)": 58.87 } ] }, "STS": { "spearman": [ { "Model": "Cohere-embed-multilingual-v3.0", "SICKFr": 79.23, "STS22 (fr)": 82.76, "STSBenchmarkMultilingualSTS (fr)": 81.84 } ] }, "Summarization": { "spearman": [ { "Model": "Cohere-embed-multilingual-v3.0", "SummEvalFr": 31.26 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "Cohere-embed-multilingual-v3.0" } ] } }, "multilingual-e5-large": { "BitextMining": { "f1": [ { "Model": "multilingual-e5-large", "BornholmBitextMining (dan-Latn)": 29.61, "BornholmBitextMining": 44.16, "Tatoeba (tgl-Latn_eng-Latn)": 92.0, "Tatoeba (gsw-Latn_eng-Latn)": 51.65, "Tatoeba (tzl-Latn_eng-Latn)": 53.16, "Tatoeba (slv-Latn_eng-Latn)": 89.57, "Tatoeba (jav-Latn_eng-Latn)": 75.46, "Tatoeba (uig-Arab_eng-Latn)": 72.17, "Tatoeba (ind-Latn_eng-Latn)": 92.9, "Tatoeba (rus-Cyrl_eng-Latn)": 92.32, "Tatoeba (war-Latn_eng-Latn)": 62.02, "Tatoeba (mar-Deva_eng-Latn)": 88.58, "Tatoeba (mkd-Cyrl_eng-Latn)": 85.63, "Tatoeba (jpn-Jpan_eng-Latn)": 95.28, "Tatoeba (hun-Latn_eng-Latn)": 94.01, "Tatoeba (slk-Latn_eng-Latn)": 93.13, "Tatoeba (tha-Thai_eng-Latn)": 95.38, "Tatoeba (fra-Latn_eng-Latn)": 93.42, "Tatoeba (ukr-Cyrl_eng-Latn)": 93.32, "Tatoeba (kat-Geor_eng-Latn)": 84.09, "Tatoeba (nov-Latn_eng-Latn)": 71.62, "Tatoeba (kor-Hang_eng-Latn)": 90.65, "Tatoeba (ben-Beng_eng-Latn)": 83.02, "Tatoeba (cor-Latn_eng-Latn)": 6.28, "Tatoeba (lfn-Latn_eng-Latn)": 62.91, 
"Tatoeba (swh-Latn_eng-Latn)": 71.61, "Tatoeba (tur-Latn_eng-Latn)": 96.27, "Tatoeba (cbk-Latn_eng-Latn)": 69.26, "Tatoeba (kur-Latn_eng-Latn)": 66.83, "Tatoeba (arq-Arab_eng-Latn)": 41.56, "Tatoeba (ceb-Latn_eng-Latn)": 55.31, "Tatoeba (max-Deva_eng-Latn)": 63.41, "Tatoeba (ang-Latn_eng-Latn)": 40.18, "Tatoeba (nds-Latn_eng-Latn)": 69.28, "Tatoeba (epo-Latn_eng-Latn)": 96.01, "Tatoeba (heb-Hebr_eng-Latn)": 86.61, "Tatoeba (yue-Hant_eng-Latn)": 88.71, "Tatoeba (dan-Latn_eng-Latn)": 95.08, "Tatoeba (swe-Latn_eng-Latn)": 95.3, "Tatoeba (lvs-Latn_eng-Latn)": 90.06, "Tatoeba (ast-Latn_eng-Latn)": 81.76, "Tatoeba (dsb-Latn_eng-Latn)": 48.44, "Tatoeba (pes-Arab_eng-Latn)": 92.14, "Tatoeba (dtp-Latn_eng-Latn)": 7.03, "Tatoeba (tuk-Latn_eng-Latn)": 33.15, "Tatoeba (isl-Latn_eng-Latn)": 92.09, "Tatoeba (khm-Khmr_eng-Latn)": 59.96, "Tatoeba (pam-Latn_eng-Latn)": 9.32, "Tatoeba (tat-Cyrl_eng-Latn)": 73.51, "Tatoeba (bos-Latn_eng-Latn)": 92.86, "Tatoeba (spa-Latn_eng-Latn)": 97.1, "Tatoeba (kaz-Cyrl_eng-Latn)": 79.67, "Tatoeba (bel-Cyrl_eng-Latn)": 91.08, "Tatoeba (zsm-Latn_eng-Latn)": 94.53, "Tatoeba (cat-Latn_eng-Latn)": 91.03, "Tatoeba (urd-Arab_eng-Latn)": 89.21, "Tatoeba (mon-Cyrl_eng-Latn)": 87.53, "Tatoeba (tam-Taml_eng-Latn)": 88.23, "Tatoeba (fry-Latn_eng-Latn)": 63.43, "Tatoeba (nob-Latn_eng-Latn)": 97.2, "Tatoeba (tel-Telu_eng-Latn)": 91.34, "Tatoeba (hye-Armn_eng-Latn)": 90.92, "Tatoeba (awa-Deva_eng-Latn)": 72.27, "Tatoeba (hrv-Latn_eng-Latn)": 96.15, "Tatoeba (ile-Latn_eng-Latn)": 79.16, "Tatoeba (amh-Ethi_eng-Latn)": 80.69, "Tatoeba (orv-Cyrl_eng-Latn)": 39.87, "Tatoeba (ara-Arab_eng-Latn)": 85.48, "Tatoeba (ido-Latn_eng-Latn)": 83.52, "Tatoeba (hin-Deva_eng-Latn)": 94.48, "Tatoeba (por-Latn_eng-Latn)": 93.63, "Tatoeba (ron-Latn_eng-Latn)": 94.87, "Tatoeba (swg-Latn_eng-Latn)": 55.64, "Tatoeba (cmn-Hans_eng-Latn)": 95.28, "Tatoeba (pol-Latn_eng-Latn)": 96.6, "Tatoeba (bul-Cyrl_eng-Latn)": 92.93, "Tatoeba (ina-Latn_eng-Latn)": 93.47, "Tatoeba (bre-Latn_eng-Latn)": 11.1, "Tatoeba (wuu-Hans_eng-Latn)": 86.37, "Tatoeba (lit-Latn_eng-Latn)": 88.48, "Tatoeba (csb-Latn_eng-Latn)": 36.98, "Tatoeba (lat-Latn_eng-Latn)": 53.37, "Tatoeba (gle-Latn_eng-Latn)": 71.48, "Tatoeba (ita-Latn_eng-Latn)": 93.29, "Tatoeba (srp-Cyrl_eng-Latn)": 93.1, "Tatoeba (arz-Arab_eng-Latn)": 74.73, "Tatoeba (cym-Latn_eng-Latn)": 76.21, "Tatoeba (ber-Tfng_eng-Latn)": 38.9, "Tatoeba (xho-Latn_eng-Latn)": 80.87, "Tatoeba (uzb-Latn_eng-Latn)": 72.35, "Tatoeba (pms-Latn_eng-Latn)": 59.85, "Tatoeba (est-Latn_eng-Latn)": 85.03, "Tatoeba (deu-Latn_eng-Latn)": 99.07, "Tatoeba (yid-Hebr_eng-Latn)": 76.33, "Tatoeba (ell-Grek_eng-Latn)": 93.88, "Tatoeba (afr-Latn_eng-Latn)": 90.22, "Tatoeba (fao-Latn_eng-Latn)": 72.62, "Tatoeba (nld-Latn_eng-Latn)": 96.63, "Tatoeba (hsb-Latn_eng-Latn)": 58.9, "Tatoeba (aze-Latn_eng-Latn)": 87.61, "Tatoeba (kzj-Latn_eng-Latn)": 7.91, "Tatoeba (kab-Latn_eng-Latn)": 36.54, "Tatoeba (mal-Mlym_eng-Latn)": 97.7, "Tatoeba (mhr-Cyrl_eng-Latn)": 6.79, "Tatoeba (ces-Latn_eng-Latn)": 94.89, "Tatoeba (gla-Latn_eng-Latn)": 59.0, "Tatoeba (cha-Latn_eng-Latn)": 27.16, "Tatoeba (glg-Latn_eng-Latn)": 93.34, "Tatoeba (vie-Latn_eng-Latn)": 97.0, "Tatoeba (oci-Latn_eng-Latn)": 54.91, "Tatoeba (nno-Latn_eng-Latn)": 91.4, "Tatoeba (fin-Latn_eng-Latn)": 95.44, "Tatoeba (eus-Latn_eng-Latn)": 77.82, "Tatoeba (sqi-Latn_eng-Latn)": 94.7 } ] }, "Classification": { "accuracy": [ { "Model": "multilingual-e5-large", "AllegroReviews (pol-Latn)": 41.04, "AllegroReviews": 41.14, "AmazonCounterfactualClassification (en-ext)": 
78.73, "AmazonCounterfactualClassification (en)": 78.67, "AmazonCounterfactualClassification (deu-Latn)": 68.66, "AmazonCounterfactualClassification (jpn-Jpan)": 78.8, "AmazonPolarityClassification": 93.26, "AmazonReviewsClassification (en)": 49.2, "AmazonReviewsClassification (deu-Latn)": 46.5, "AmazonReviewsClassification (spa-Latn)": 44.35, "AmazonReviewsClassification (fra-Latn)": 42.55, "AmazonReviewsClassification (jpn-Jpan)": 41.71, "AmazonReviewsClassification (cmn-Hans)": 38.87, "AmazonReviewsClassification (fr)": 41.91, "AngryTweetsClassification (dan-Latn)": 57.69, "AngryTweetsClassification": 54.95, "Banking77Classification": 75.88, "CBD (pol-Latn)": 69.84, "CBD": 69.9, "DKHateClassification": 66.02, "DanishPoliticalCommentsClassification (dan-Latn)": 39.43, "DanishPoliticalCommentsClassification": 38.27, "EmotionClassification": 47.58, "GeoreviewClassification (rus-Cyrl)": 49.69, "HeadlineClassification (rus-Cyrl)": 77.19, "IFlyTek (cmn-Hans)": 41.86, "IFlyTek": 45.47, "ImdbClassification": 90.23, "InappropriatenessClassification (rus-Cyrl)": 61.6, "JDReview (cmn-Hans)": 80.54, "JDReview": 80.99, "KinopoiskClassification (rus-Cyrl)": 56.59, "LccSentimentClassification (dan-Latn)": 61.53, "LccSentimentClassification": 59.6, "MTOPDomainClassification (en)": 91.81, "MTOPDomainClassification (deu-Latn)": 90.44, "MTOPDomainClassification (spa-Latn)": 88.34, "MTOPDomainClassification (fra-Latn)": 86.23, "MTOPDomainClassification (hin-Deva)": 86.84, "MTOPDomainClassification (tha-Thai)": 86.88, "MTOPDomainClassification (fr)": 86.41, "MTOPIntentClassification (en)": 64.29, "MTOPIntentClassification (deu-Latn)": 65.97, "MTOPIntentClassification (spa-Latn)": 61.9, "MTOPIntentClassification (fra-Latn)": 56.25, "MTOPIntentClassification (hin-Deva)": 59.17, "MTOPIntentClassification (tha-Thai)": 62.59, "MTOPIntentClassification (fr)": 59.43, "MasakhaNEWSClassification (amh-Ethi)": 83.7, "MasakhaNEWSClassification (eng)": 78.26, "MasakhaNEWSClassification (fra-Latn)": 76.11, "MasakhaNEWSClassification (hau-Latn)": 76.17, "MasakhaNEWSClassification (ibo-Latn)": 70.05, "MasakhaNEWSClassification (lin-Latn)": 75.89, "MasakhaNEWSClassification (lug-Latn)": 73.63, "MasakhaNEWSClassification (orm-Ethi)": 80.31, "MasakhaNEWSClassification (pcm-Latn)": 89.15, "MasakhaNEWSClassification (run-Latn)": 76.55, "MasakhaNEWSClassification (sna-Latn)": 86.99, "MasakhaNEWSClassification (som-Latn)": 64.63, "MasakhaNEWSClassification (swa-Latn)": 73.42, "MasakhaNEWSClassification (tir-Ethi)": 72.06, "MasakhaNEWSClassification (xho-Latn)": 82.56, "MasakhaNEWSClassification (yor-Latn)": 81.09, "MasakhaNEWSClassification (fra)": 79.38, "MassiveIntentClassification (kor-Kore)": 63.92, "MassiveIntentClassification (lav-Latn)": 58.31, "MassiveIntentClassification (isl-Latn)": 53.3, "MassiveIntentClassification (tel-Telu)": 53.96, "MassiveIntentClassification (mya-Mymr)": 49.73, "MassiveIntentClassification (nob-Latn)": 64.54, "MassiveIntentClassification (en)": 68.51, "MassiveIntentClassification (spa-Latn)": 64.01, "MassiveIntentClassification (swe-Latn)": 66.52, "MassiveIntentClassification (cmo-Hant)": 58.78, "MassiveIntentClassification (pol-Latn)": 65.09, "MassiveIntentClassification (rus-Cyrl)": 65.76, "MassiveIntentClassification (aze-Latn)": 54.68, "MassiveIntentClassification (fin-Latn)": 64.28, "MassiveIntentClassification (cmo-Hans)": 66.23, "MassiveIntentClassification (urd-Arab)": 54.6, "MassiveIntentClassification (tam-Taml)": 53.41, "MassiveIntentClassification (hin-Deva)": 60.93, 
"MassiveIntentClassification (deu-Latn)": 63.82, "MassiveIntentClassification (ell-Grek)": 64.34, "MassiveIntentClassification (hye-Armn)": 50.89, "MassiveIntentClassification (por-Latn)": 65.6, "MassiveIntentClassification (nld-Latn)": 65.0, "MassiveIntentClassification (fas-Arab)": 63.74, "MassiveIntentClassification (ron-Latn)": 59.76, "MassiveIntentClassification (slv-Latn)": 59.38, "MassiveIntentClassification (heb-Hebr)": 62.44, "MassiveIntentClassification (vie-Latn)": 63.39, "MassiveIntentClassification (sqi-Latn)": 57.3, "MassiveIntentClassification (khm-Khmr)": 34.88, "MassiveIntentClassification (ben-Beng)": 55.6, "MassiveIntentClassification (tgl-Latn)": 54.77, "MassiveIntentClassification (jpn-Jpan)": 67.11, "MassiveIntentClassification (kat-Geor)": 41.45, "MassiveIntentClassification (afr-Latn)": 53.69, "MassiveIntentClassification (cym-Latn)": 44.22, "MassiveIntentClassification (amh-Ethi)": 45.48, "MassiveIntentClassification (ita-Latn)": 63.89, "MassiveIntentClassification (mal-Mlym)": 57.58, "MassiveIntentClassification (tha-Thai)": 62.75, "MassiveIntentClassification (ind-Latn)": 63.51, "MassiveIntentClassification (jav-Latn)": 48.96, "MassiveIntentClassification (dan-Latn)": 63.7, "MassiveIntentClassification (ara-Arab)": 54.1, "MassiveIntentClassification (kan-Knda)": 53.45, "MassiveIntentClassification (hun-Latn)": 64.0, "MassiveIntentClassification (tur-Latn)": 64.61, "MassiveIntentClassification (msa-Latn)": 58.49, "MassiveIntentClassification (mon-Cyrl)": 49.6, "MassiveIntentClassification (swa-Latn)": 47.69, "MassiveIntentClassification (fra-Latn)": 63.37, "MassiveIntentClassification (da)": 60.16, "MassiveIntentClassification (nb)": 59.83, "MassiveIntentClassification (sv)": 61.78, "MassiveIntentClassification (pl)": 65.07, "MassiveScenarioClassification (heb-Hebr)": 67.72, "MassiveScenarioClassification (vie-Latn)": 68.91, "MassiveScenarioClassification (cmo-Hant)": 64.35, "MassiveScenarioClassification (urd-Arab)": 60.89, "MassiveScenarioClassification (isl-Latn)": 60.74, "MassiveScenarioClassification (ell-Grek)": 69.74, "MassiveScenarioClassification (mon-Cyrl)": 55.37, "MassiveScenarioClassification (swa-Latn)": 56.27, "MassiveScenarioClassification (tam-Taml)": 58.76, "MassiveScenarioClassification (hye-Armn)": 55.76, "MassiveScenarioClassification (amh-Ethi)": 52.69, "MassiveScenarioClassification (ben-Beng)": 61.85, "MassiveScenarioClassification (tel-Telu)": 59.49, "MassiveScenarioClassification (dan-Latn)": 71.18, "MassiveScenarioClassification (slv-Latn)": 65.33, "MassiveScenarioClassification (en)": 73.04, "MassiveScenarioClassification (rus-Cyrl)": 70.85, "MassiveScenarioClassification (mal-Mlym)": 63.17, "MassiveScenarioClassification (sqi-Latn)": 63.79, "MassiveScenarioClassification (ita-Latn)": 69.45, "MassiveScenarioClassification (kor-Kore)": 70.54, "MassiveScenarioClassification (cmo-Hans)": 72.25, "MassiveScenarioClassification (cym-Latn)": 51.25, "MassiveScenarioClassification (pol-Latn)": 69.83, "MassiveScenarioClassification (ind-Latn)": 69.43, "MassiveScenarioClassification (tur-Latn)": 68.12, "MassiveScenarioClassification (tgl-Latn)": 60.71, "MassiveScenarioClassification (hin-Deva)": 66.85, "MassiveScenarioClassification (spa-Latn)": 69.07, "MassiveScenarioClassification (lav-Latn)": 64.28, "MassiveScenarioClassification (mya-Mymr)": 54.03, "MassiveScenarioClassification (ara-Arab)": 61.0, "MassiveScenarioClassification (kan-Knda)": 59.36, "MassiveScenarioClassification (jav-Latn)": 56.24, "MassiveScenarioClassification (por-Latn)": 
68.33, "MassiveScenarioClassification (tha-Thai)": 69.06, "MassiveScenarioClassification (aze-Latn)": 58.49, "MassiveScenarioClassification (fra-Latn)": 68.74, "MassiveScenarioClassification (ron-Latn)": 66.06, "MassiveScenarioClassification (nld-Latn)": 71.11, "MassiveScenarioClassification (fas-Arab)": 67.55, "MassiveScenarioClassification (deu-Latn)": 71.25, "MassiveScenarioClassification (nob-Latn)": 70.44, "MassiveScenarioClassification (msa-Latn)": 63.55, "MassiveScenarioClassification (afr-Latn)": 62.35, "MassiveScenarioClassification (hun-Latn)": 70.53, "MassiveScenarioClassification (swe-Latn)": 72.77, "MassiveScenarioClassification (kat-Geor)": 47.82, "MassiveScenarioClassification (jpn-Jpan)": 73.16, "MassiveScenarioClassification (khm-Khmr)": 41.14, "MassiveScenarioClassification (fin-Latn)": 68.62, "MassiveScenarioClassification (da)": 67.46, "MassiveScenarioClassification (nb)": 66.18, "MassiveScenarioClassification (sv)": 69.15, "MassiveScenarioClassification (pl)": 69.82, "MultilingualSentiment (cmn-Hans)": 70.81, "MultilingualSentiment": 68.58, "NoRecClassification (nob-Latn)": 58.43, "NoRecClassification": 62.76, "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 80.15, "NordicLangClassification": 82.29, "NorwegianParliament": 60.36, "OnlineShopping (cmn-Hans)": 90.45, "OnlineShopping": 90.81, "PAC (pol-Latn)": 70.33, "PAC": 70.37, "PolEmo2.0-IN (pol-Latn)": 77.06, "PolEmo2.0-IN": 77.06, "PolEmo2.0-OUT (pol-Latn)": 53.48, "PolEmo2.0-OUT": 53.38, "RuReviewsClassification (rus-Cyrl)": 65.28, "RuSciBenchGRNTIClassification (rus-Cyrl)": 58.2, "RuSciBenchOECDClassification (rus-Cyrl)": 43.91, "ScalaDaClassification": 50.77, "ScalaNbClassification": 50.44, "TNews (cmn-Hans)": 48.8, "TNews": 48.38, "ToxicConversationsClassification": 66.01, "TweetSentimentExtractionClassification": 62.8, "Waimai (cmn-Hans)": 86.3, "Waimai": 85.02 } ] }, "Clustering": { "v_measure": [ { "Model": "multilingual-e5-large", "8TagsClustering": 33.88, "AlloProfClusteringP2P": 62.99, "AlloProfClusteringS2S": 32.26, "BiorxivClusteringP2P": 35.5, "BiorxivClusteringS2S": 33.3, "CLSClusteringP2P": 40.68, "CLSClusteringS2S": 38.59, "GeoreviewClusteringP2P (rus-Cyrl)": 60.51, "HALClusteringS2S": 22.44, "MLSUMClusteringP2P (rus-Cyrl)": 42.79, "MLSUMClusteringP2P": 44.04, "MLSUMClusteringS2S (rus-Cyrl)": 44.32, "MLSUMClusteringS2S": 37.65, "MasakhaNEWSClusteringP2P (amh-Ethi)": 67.16, "MasakhaNEWSClusteringP2P (eng)": 61.1, "MasakhaNEWSClusteringP2P (fra-Latn)": 41.66, "MasakhaNEWSClusteringP2P (hau-Latn)": 60.7, "MasakhaNEWSClusteringP2P (ibo-Latn)": 48.41, "MasakhaNEWSClusteringP2P (lin-Latn)": 57.69, "MasakhaNEWSClusteringP2P (lug-Latn)": 71.95, "MasakhaNEWSClusteringP2P (orm-Ethi)": 60.14, "MasakhaNEWSClusteringP2P (pcm-Latn)": 80.84, "MasakhaNEWSClusteringP2P (run-Latn)": 59.91, "MasakhaNEWSClusteringP2P (sna-Latn)": 53.3, "MasakhaNEWSClusteringP2P (som-Latn)": 34.38, "MasakhaNEWSClusteringP2P (swa-Latn)": 33.25, "MasakhaNEWSClusteringP2P (tir-Ethi)": 54.21, "MasakhaNEWSClusteringP2P (xho-Latn)": 41.12, "MasakhaNEWSClusteringP2P (yor-Latn)": 36.22, "MasakhaNEWSClusteringP2P (fra)": 40.94, "MasakhaNEWSClusteringS2S (amh-Ethi)": 47.24, "MasakhaNEWSClusteringS2S (eng)": 53.93, "MasakhaNEWSClusteringS2S (fra-Latn)": 39.84, "MasakhaNEWSClusteringS2S (hau-Latn)": 19.24, "MasakhaNEWSClusteringS2S (ibo-Latn)": 28.88, "MasakhaNEWSClusteringS2S (lin-Latn)": 42.22, "MasakhaNEWSClusteringS2S (lug-Latn)": 43.63, "MasakhaNEWSClusteringS2S (orm-Ethi)": 26.29, "MasakhaNEWSClusteringS2S 
(pcm-Latn)": 59.77, "MasakhaNEWSClusteringS2S (run-Latn)": 51.46, "MasakhaNEWSClusteringS2S (sna-Latn)": 48.14, "MasakhaNEWSClusteringS2S (som-Latn)": 25.14, "MasakhaNEWSClusteringS2S (swa-Latn)": 7.28, "MasakhaNEWSClusteringS2S (tir-Ethi)": 50.51, "MasakhaNEWSClusteringS2S (xho-Latn)": 30.98, "MasakhaNEWSClusteringS2S (yor-Latn)": 34.09, "MasakhaNEWSClusteringS2S (fra)": 30.56, "MedrxivClusteringP2P": 31.7, "MedrxivClusteringS2S": 29.76, "RedditClustering": 46.91, "RedditClusteringP2P": 63.0, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 52.03, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 45.11, "StackExchangeClustering": 58.37, "StackExchangeClusteringP2P": 32.9, "ThuNewsClusteringP2P": 58.05, "ThuNewsClusteringS2S": 55.59, "TwentyNewsgroupsClustering": 39.4 } ] }, "PairClassification": { "ap": [ { "Model": "multilingual-e5-large", "CDSC-E (pol-Latn)": 74.47, "CDSC-E": 74.47, "Cmnli": 78.18, "Ocnli": 61.6, "OpusparcusPC (deu-Latn)": 97.27, "OpusparcusPC (en)": 98.74, "OpusparcusPC (fin-Latn)": 94.26, "OpusparcusPC (fra-Latn)": 93.68, "OpusparcusPC (rus-Cyrl)": 89.64, "OpusparcusPC (swe-Latn)": 94.98, "OpusparcusPC (fr)": 93.89, "PPC": 92.18, "PSC (pol-Latn)": 99.4, "PSC": 99.39, "PawsXPairClassification (deu-Latn)": 56.81, "PawsXPairClassification (en)": 62.97, "PawsXPairClassification (spa-Latn)": 56.85, "PawsXPairClassification (fra-Latn)": 58.68, "PawsXPairClassification (jpn-Hira)": 50.7, "PawsXPairClassification (kor-Hang)": 52.08, "PawsXPairClassification (cmn-Hans)": 56.82, "PawsXPairClassification (fr)": 58.5, "SICK-E-PL (pol-Latn)": 75.95, "SICK-E-PL": 75.96, "SprintDuplicateQuestions": 93.14, "TERRa (rus-Cyrl)": 58.4, "TwitterSemEval2015": 75.28, "TwitterURLCorpus": 85.83 } ] }, "Reranking": { "map": [ { "Model": "multilingual-e5-large", "AlloprofReranking (fra-Latn)": 69.44, "AlloprofReranking": 57.37, "AskUbuntuDupQuestions": 59.24, "CMedQAv1": 68.25, "CMedQAv2": 68.56, "MMarcoReranking (cmn-Hans)": 29.12, "MMarcoReranking": 21.34, "MindSmallReranking": 30.24, "RuBQReranking (rus-Cyrl)": 75.58, "SciDocsRR": 84.22, "StackOverflowDupQuestions": 50.14, "SyntecReranking (fra-Latn)": 85.45, "SyntecReranking": 86.9, "T2Reranking (cmn-Hans)": 66.32, "T2Reranking": 65.83 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "multilingual-e5-large", "AILACasedocs": 26.43, "AILAStatutes": 20.84, "ARCChallenge": 10.83, "AlloprofRetrieval (fra-Latn)": 39.34, "AlloprofRetrieval": 38.15, "AlphaNLI": 13.59, "ArguAna": 54.36, "ArguAna-PL (pol-Latn)": 52.99, "ArguAna-PL": 53.02, "BSARDRetrieval (fra-Latn)": 21.28, "BSARDRetrieval": 0.27, "CmedqaRetrieval (cmn-Hans)": 28.66, "CmedqaRetrieval": 28.67, "CovidRetrieval (cmn-Hans)": 75.61, "CovidRetrieval": 75.51, "DBPedia-PL": 35.82, "DuRetrieval (cmn-Hans)": 85.3, "DuRetrieval": 85.32, "EcomRetrieval (cmn-Hans)": 54.67, "EcomRetrieval": 54.75, "FiQA-PL (pol-Latn)": 32.97, "FiQA-PL": 33.0, "FiQA2018": 43.81, "GerDaLIRSmall (deu-Latn)": 15.72, "HellaSwag": 27.35, "HotpotQA-PL": 67.41, "LEMBNarrativeQARetrieval": 24.22, "LEMBNeedleRetrieval": 28.0, "LEMBPasskeyRetrieval": 38.25, "LEMBQMSumRetrieval": 24.26, "LEMBSummScreenFDRetrieval": 71.12, "LEMBWikimQARetrieval": 56.8, "LeCaRDv2 (zho-Hans)": 55.83, "LegalBenchConsumerContractsQA": 73.3, "LegalBenchCorporateLobbying": 89.72, "LegalQuAD (deu-Latn)": 43.17, "LegalSummarization": 62.1, "MMarcoRetrieval (cmn-Hans)": 79.2, "MMarcoRetrieval": 79.2, "MSMARCO-PL": 33.38, "MedicalRetrieval (cmn-Hans)": 51.44, "MedicalRetrieval": 51.44, "MintakaRetrieval (ara-Arab)": 26.5, "MintakaRetrieval (deu-Latn)": 32.77, 
"MintakaRetrieval (spa-Latn)": 34.23, "MintakaRetrieval (fra-Latn)": 34.24, "MintakaRetrieval (hin-Deva)": 27.45, "MintakaRetrieval (ita-Latn)": 33.84, "MintakaRetrieval (jpn-Hira)": 26.45, "MintakaRetrieval (por-Latn)": 35.9, "MintakaRetrieval (fr)": 25.2, "NFCorpus": 33.95, "NFCorpus-PL (pol-Latn)": 30.21, "NFCorpus-PL": 30.24, "NQ-PL": 52.79, "PIQA": 28.82, "Quail": 4.85, "Quora-PL": 83.65, "RARbCode": 58.92, "RARbMath": 67.32, "RiaNewsRetrieval (rus-Cyrl)": 80.67, "RuBQRetrieval (rus-Cyrl)": 74.11, "SCIDOCS": 17.45, "SCIDOCS-PL (pol-Latn)": 13.82, "SCIDOCS-PL": 13.81, "SIQA": 5.36, "SciFact": 70.42, "SciFact-PL (pol-Latn)": 65.66, "SciFact-PL": 65.66, "SpartQA": 5.64, "SyntecRetrieval (fra-Latn)": 82.39, "SyntecRetrieval": 81.07, "T2Retrieval (cmn-Hans)": 76.07, "T2Retrieval": 76.11, "TRECCOVID": 71.21, "TRECCOVID-PL (pol-Latn)": 69.9, "TRECCOVID-PL": 70.03, "TempReasonL1": 1.14, "TempReasonL2Fact": 42.97, "TempReasonL2Pure": 2.05, "TempReasonL3Fact": 38.22, "TempReasonL3Pure": 8.31, "Touche2020": 23.13, "VideoRetrieval (cmn-Hans)": 58.28, "VideoRetrieval": 58.25, "WinoGrande": 54.99, "XPQARetrieval (ara-Arab_ara-Arab)": 43.69, "XPQARetrieval (eng-Latn_ara-Arab)": 30.86, "XPQARetrieval (ara-Arab_eng-Latn)": 39.11, "XPQARetrieval (deu-Latn_deu-Latn)": 76.83, "XPQARetrieval (eng-Latn_deu-Latn)": 42.87, "XPQARetrieval (deu-Latn_eng-Latn)": 68.25, "XPQARetrieval (spa-Latn_spa-Latn)": 61.77, "XPQARetrieval (eng-Latn_spa-Latn)": 37.55, "XPQARetrieval (spa-Latn_eng-Latn)": 52.86, "XPQARetrieval (fra-Latn_fra-Latn)": 61.38, "XPQARetrieval (eng-Latn_fra-Latn)": 39.12, "XPQARetrieval (fra-Latn_eng-Latn)": 57.93, "XPQARetrieval (hin-Deva_hin-Deva)": 71.09, "XPQARetrieval (eng-Latn_hin-Deva)": 32.39, "XPQARetrieval (hin-Deva_eng-Latn)": 68.31, "XPQARetrieval (ita-Latn_ita-Latn)": 74.32, "XPQARetrieval (eng-Latn_ita-Latn)": 37.95, "XPQARetrieval (ita-Latn_eng-Latn)": 64.54, "XPQARetrieval (jpn-Hira_jpn-Hira)": 74.11, "XPQARetrieval (eng-Latn_jpn-Hira)": 38.31, "XPQARetrieval (jpn-Hira_eng-Latn)": 65.42, "XPQARetrieval (kor-Hang_kor-Hang)": 35.72, "XPQARetrieval (eng-Latn_kor-Hang)": 31.09, "XPQARetrieval (kor-Hang_eng-Latn)": 34.06, "XPQARetrieval (pol-Latn_pol-Latn)": 51.01, "XPQARetrieval (eng-Latn_pol-Latn)": 30.49, "XPQARetrieval (pol-Latn_eng-Latn)": 44.66, "XPQARetrieval (por-Latn_por-Latn)": 41.1, "XPQARetrieval (eng-Latn_por-Latn)": 22.03, "XPQARetrieval (por-Latn_eng-Latn)": 35.15, "XPQARetrieval (tam-Taml_tam-Taml)": 39.51, "XPQARetrieval (eng-Latn_tam-Taml)": 17.33, "XPQARetrieval (tam-Taml_eng-Latn)": 33.67, "XPQARetrieval (cmn-Hans_cmn-Hans)": 66.27, "XPQARetrieval (eng-Latn_cmn-Hans)": 26.24, "XPQARetrieval (cmn-Hans_eng-Latn)": 55.15, "XPQARetrieval (fr)": 66.15 } ] }, "STS": { "spearman": [ { "Model": "multilingual-e5-large", "AFQMC (cmn-Hans)": 33.01, "AFQMC": 33.02, "ATEC (cmn-Hans)": 39.8, "ATEC": 39.81, "BIOSSES": 82.49, "BQ (cmn-Hans)": 46.44, "BQ": 46.44, "CDSC-R (pol-Latn)": 91.0, "CDSC-R": 91.0, "LCQMC (cmn-Hans)": 75.95, "LCQMC": 75.95, "PAWSX (cmn-Hans)": 14.63, "PAWSX": 14.63, "QBQTC": 29.77, "RUParaPhraserSTS (rus-Cyrl)": 71.82, "RuSTSBenchmarkSTS (rus-Cyrl)": 83.15, "SICK-R": 80.23, "SICK-R-PL (pol-Latn)": 75.08, "SICK-R-PL": 75.08, "SICKFr (fra-Latn)": 78.81, "SICKFr": 78.78, "STS12": 80.02, "STS13": 81.55, "STS14": 77.72, "STS15": 89.31, "STS16": 85.79, "STS17 (en-en)": 88.12, "STS17 (spa-Latn)": 86.71, "STS17 (spa-Latn_eng-Latn)": 80.74, "STS17 (eng-Latn_ara-Arab)": 75.03, "STS17 (fra-Latn_eng-Latn)": 85.62, "STS17 (kor-Hang)": 82.27, "STS17 (ita-Latn_eng-Latn)": 
84.52, "STS17 (ara-Arab)": 77.83, "STS17 (eng-Latn_tur-Latn)": 71.22, "STS17 (eng-Latn_deu-Latn)": 86.15, "STS17 (nld-Latn_eng-Latn)": 85.29, "STS22 (spa-Latn)": 64.6, "STS22 (spa-Latn_eng-Latn)": 72.51, "STS22 (deu-Latn_eng-Latn)": 56.59, "STS22 (cmn-Hans_eng-Latn)": 65.95, "STS22 (deu-Latn_pol-Latn)": 49.58, "STS22 (fra-Latn_pol-Latn)": 50.71, "STS22 (en)": 63.66, "STS22 (ara-Arab)": 56.95, "STS22 (spa-Latn_ita-Latn)": 68.92, "STS22 (tur-Latn)": 63.56, "STS22 (deu-Latn_fra-Latn)": 67.96, "STS22 (ita-Latn)": 76.99, "STS22 (cmn-Hans)": 66.82, "STS22 (rus-Cyrl)": 59.89, "STS22 (fra-Latn)": 76.77, "STS22 (pol-Latn_eng-Latn)": 65.54, "STS22 (deu-Latn)": 56.58, "STS22 (pol-Latn)": 34.65, "STS22 (zh)": 65.64, "STS22 (pl)": 34.66, "STSB (cmn-Hans)": 81.08, "STSB": 81.08, "STSBenchmark": 87.29, "STSBenchmarkMultilingualSTS (cmn-Hans)": 81.22, "STSBenchmarkMultilingualSTS (en)": 87.29, "STSBenchmarkMultilingualSTS (pol-Latn)": 81.06, "STSBenchmarkMultilingualSTS (nld-Latn)": 81.63, "STSBenchmarkMultilingualSTS (ita-Latn)": 81.75, "STSBenchmarkMultilingualSTS (rus-Cyrl)": 83.05, "STSBenchmarkMultilingualSTS (por-Latn)": 73.31, "STSBenchmarkMultilingualSTS (spa-Latn)": 83.81, "STSBenchmarkMultilingualSTS (fra-Latn)": 83.28, "STSBenchmarkMultilingualSTS (deu-Latn)": 84.27, "STSBenchmarkMultilingualSTS (fr)": 82.53 } ] }, "Summarization": { "spearman": [ { "Model": "multilingual-e5-large", "SummEval": 29.65, "SummEvalFr (fra-Latn)": 30.92, "SummEvalFr": 30.92 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "multilingual-e5-large" } ] } }, "text-embedding-3-large": { "BitextMining": { "f1": [ { "Model": "text-embedding-3-large" } ] }, "Classification": { "accuracy": [ { "Model": "text-embedding-3-large", "AmazonCounterfactualClassification (en)": 78.93, "AmazonPolarityClassification": 92.85, "AmazonReviewsClassification (en)": 48.7, "Banking77Classification": 85.69, "EmotionClassification": 51.58, "ImdbClassification": 87.67, "MTOPDomainClassification (en)": 95.36, "MTOPIntentClassification (en)": 75.07, "MassiveIntentClassification (en)": 74.64, "MassiveScenarioClassification (en)": 79.79, "ToxicConversationsClassification": 72.92, "TweetSentimentExtractionClassification": 62.22 } ] }, "Clustering": { "v_measure": [ { "Model": "text-embedding-3-large", "ArxivClusteringP2P": 49.01, "ArxivClusteringS2S": 44.45, "BiorxivClusteringP2P": 38.03, "BiorxivClusteringS2S": 36.53, "MedrxivClusteringP2P": 32.7, "MedrxivClusteringS2S": 31.27, "RedditClustering": 67.84, "RedditClusteringP2P": 67.96, "StackExchangeClustering": 76.26, "StackExchangeClusteringP2P": 36.88, "TwentyNewsgroupsClustering": 58.14 } ] }, "PairClassification": { "ap": [ { "Model": "text-embedding-3-large", "SprintDuplicateQuestions": 92.25, "TwitterSemEval2015": 77.13, "TwitterURLCorpus": 87.78 } ] }, "Reranking": { "map": [ { "Model": "text-embedding-3-large", "AskUbuntuDupQuestions": 65.03, "MindSmallReranking": 29.86, "SciDocsRR": 86.66, "StackOverflowDupQuestions": 55.08 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "text-embedding-3-large", "AILACasedocs": 39.0, "AILAStatutes": 41.31, "ARCChallenge": 23.98, "AlphaNLI": 37.27, "ArguAna": 58.05, "BrightRetrieval (theoremqa_questions)": 22.22, "BrightRetrieval (leetcode)": 23.65, "BrightRetrieval (earth_science)": 26.27, "BrightRetrieval (psychology)": 27.52, "BrightRetrieval (robotics)": 12.93, "BrightRetrieval (economics)": 19.98, "BrightRetrieval (stackoverflow)": 12.49, "BrightRetrieval (biology)": 23.67, "BrightRetrieval (theoremqa_theorems)": 9.25, "BrightRetrieval (pony)": 
2.45, "BrightRetrieval (sustainable_living)": 20.32, "BrightRetrieval (aops)": 8.45, "CQADupstackRetrieval": 47.54, "ClimateFEVER": 30.27, "DBPedia": 44.76, "FEVER": 87.94, "FiQA2018": 55.0, "GerDaLIRSmall": 32.77, "HellaSwag": 34.12, "HotpotQA": 71.58, "LEMBNarrativeQARetrieval": 44.09, "LEMBNeedleRetrieval": 29.25, "LEMBPasskeyRetrieval": 63.0, "LEMBQMSumRetrieval": 32.49, "LEMBSummScreenFDRetrieval": 84.8, "LEMBWikimQARetrieval": 54.16, "LeCaRDv2": 57.2, "LegalBenchConsumerContractsQA": 79.39, "LegalBenchCorporateLobbying": 95.09, "LegalQuAD": 57.47, "LegalSummarization": 71.55, "MSMARCO": 40.24, "NFCorpus": 42.07, "NQ": 61.27, "PIQA": 41.96, "Quail": 10.15, "QuoraRetrieval": 89.05, "RARbCode": 89.64, "RARbMath": 90.08, "SCIDOCS": 23.11, "SIQA": 3.44, "SciFact": 77.77, "SpartQA": 7.51, "TRECCOVID": 79.56, "TempReasonL1": 2.13, "TempReasonL2Fact": 28.65, "TempReasonL2Pure": 10.34, "TempReasonL3Fact": 25.52, "TempReasonL3Pure": 15.28, "Touche2020": 23.35, "WinoGrande": 29.11 } ] }, "STS": { "spearman": [ { "Model": "text-embedding-3-large", "BIOSSES": 84.68, "SICK-R": 79.0, "STS12": 72.84, "STS13": 86.1, "STS14": 81.15, "STS15": 88.49, "STS16": 85.08, "STS17 (en-en)": 90.22, "STS22 (en)": 66.14, "STSBenchmark": 83.56 } ] }, "Summarization": { "spearman": [ { "Model": "text-embedding-3-large", "SummEval": 29.92 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "text-embedding-3-large", "Core17InstructionRetrieval": -0.2, "News21InstructionRetrieval": -2.03, "Robust04InstructionRetrieval": -5.81 } ] } }, "contriever": { "BitextMining": { "f1": [ { "Model": "contriever" } ] }, "Classification": { "accuracy": [ { "Model": "contriever" } ] }, "Clustering": { "v_measure": [ { "Model": "contriever" } ] }, "PairClassification": { "ap": [ { "Model": "contriever" } ] }, "Reranking": { "map": [ { "Model": "contriever" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "contriever", "ARCChallenge": 8.62, "AlphaNLI": 31.77, "HellaSwag": 17.73, "PIQA": 24.64, "Quail": 4.97, "RARbCode": 9.28, "RARbMath": 30.76, "SIQA": 1.27, "SpartQA": 10.94, "TempReasonL1": 1.93, "TempReasonL2Fact": 22.68, "TempReasonL2Pure": 1.12, "TempReasonL3Fact": 20.62, "TempReasonL3Pure": 7.8, "WinoGrande": 47.15 } ] }, "STS": { "spearman": [ { "Model": "contriever" } ] }, "Summarization": { "spearman": [ { "Model": "contriever" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "contriever" } ] } }, "rubert-tiny-turbo": { "BitextMining": { "f1": [ { "Model": "rubert-tiny-turbo", "Tatoeba (rus-Cyrl_eng-Latn)": 83.14 } ] }, "Classification": { "accuracy": [ { "Model": "rubert-tiny-turbo", "AmazonPolarityClassification": 68.36, "Banking77Classification": 59.86, "EmotionClassification": 29.5, "GeoreviewClassification (rus-Cyrl)": 41.36, "HeadlineClassification (rus-Cyrl)": 68.9, "ImdbClassification": 58.36, "InappropriatenessClassification (rus-Cyrl)": 59.11, "KinopoiskClassification (rus-Cyrl)": 50.47, "MassiveIntentClassification (cmo-Hans)": 5.21, "MassiveIntentClassification (kor-Kore)": 2.53, "MassiveIntentClassification (hin-Deva)": 2.56, "MassiveIntentClassification (kan-Knda)": 2.06, "MassiveIntentClassification (kat-Geor)": 2.64, "MassiveIntentClassification (amh-Ethi)": 2.28, "MassiveIntentClassification (mya-Mymr)": 3.96, "MassiveIntentClassification (ell-Grek)": 9.66, "MassiveIntentClassification (lav-Latn)": 22.32, "MassiveIntentClassification (mal-Mlym)": 2.39, "MassiveIntentClassification (mon-Cyrl)": 28.99, "MassiveIntentClassification (urd-Arab)": 2.45, "MassiveIntentClassification (fas-Arab)": 3.34, 
"MassiveIntentClassification (ron-Latn)": 31.72, "MassiveIntentClassification (isl-Latn)": 24.85, "MassiveIntentClassification (en)": 50.16, "MassiveIntentClassification (hun-Latn)": 25.52, "MassiveIntentClassification (fra-Latn)": 31.51, "MassiveIntentClassification (tha-Thai)": 3.74, "MassiveIntentClassification (deu-Latn)": 32.1, "MassiveIntentClassification (tur-Latn)": 27.56, "MassiveIntentClassification (por-Latn)": 34.35, "MassiveIntentClassification (sqi-Latn)": 32.38, "MassiveIntentClassification (cmo-Hant)": 6.81, "MassiveIntentClassification (hye-Armn)": 2.72, "MassiveIntentClassification (dan-Latn)": 33.95, "MassiveIntentClassification (afr-Latn)": 30.4, "MassiveIntentClassification (ara-Arab)": 3.8, "MassiveIntentClassification (jav-Latn)": 28.53, "MassiveIntentClassification (tel-Telu)": 2.21, "MassiveIntentClassification (tgl-Latn)": 32.02, "MassiveIntentClassification (swa-Latn)": 27.79, "MassiveIntentClassification (jpn-Jpan)": 5.61, "MassiveIntentClassification (msa-Latn)": 28.94, "MassiveIntentClassification (nob-Latn)": 32.3, "MassiveIntentClassification (fin-Latn)": 31.13, "MassiveIntentClassification (ind-Latn)": 33.56, "MassiveIntentClassification (cym-Latn)": 31.68, "MassiveIntentClassification (slv-Latn)": 31.39, "MassiveIntentClassification (spa-Latn)": 31.03, "MassiveIntentClassification (ben-Beng)": 3.08, "MassiveIntentClassification (swe-Latn)": 30.23, "MassiveIntentClassification (rus-Cyrl)": 57.98, "MassiveIntentClassification (aze-Latn)": 23.58, "MassiveIntentClassification (ita-Latn)": 35.24, "MassiveIntentClassification (pol-Latn)": 26.82, "MassiveIntentClassification (vie-Latn)": 23.72, "MassiveIntentClassification (tam-Taml)": 1.5, "MassiveIntentClassification (heb-Hebr)": 2.25, "MassiveIntentClassification (nld-Latn)": 32.44, "MassiveIntentClassification (khm-Khmr)": 5.14, "MassiveScenarioClassification (cmo-Hans)": 10.6, "MassiveScenarioClassification (kor-Kore)": 5.63, "MassiveScenarioClassification (hin-Deva)": 7.41, "MassiveScenarioClassification (kan-Knda)": 7.6, "MassiveScenarioClassification (kat-Geor)": 7.01, "MassiveScenarioClassification (amh-Ethi)": 7.68, "MassiveScenarioClassification (mya-Mymr)": 10.73, "MassiveScenarioClassification (ell-Grek)": 17.95, "MassiveScenarioClassification (lav-Latn)": 29.29, "MassiveScenarioClassification (mal-Mlym)": 6.92, "MassiveScenarioClassification (mon-Cyrl)": 33.7, "MassiveScenarioClassification (urd-Arab)": 8.53, "MassiveScenarioClassification (fas-Arab)": 6.62, "MassiveScenarioClassification (ron-Latn)": 40.02, "MassiveScenarioClassification (isl-Latn)": 33.1, "MassiveScenarioClassification (en)": 61.29, "MassiveScenarioClassification (hun-Latn)": 36.41, "MassiveScenarioClassification (fra-Latn)": 42.9, "MassiveScenarioClassification (tha-Thai)": 8.26, "MassiveScenarioClassification (deu-Latn)": 42.07, "MassiveScenarioClassification (tur-Latn)": 34.85, "MassiveScenarioClassification (por-Latn)": 40.79, "MassiveScenarioClassification (sqi-Latn)": 42.66, "MassiveScenarioClassification (cmo-Hant)": 11.93, "MassiveScenarioClassification (hye-Armn)": 8.78, "MassiveScenarioClassification (dan-Latn)": 43.69, "MassiveScenarioClassification (afr-Latn)": 40.84, "MassiveScenarioClassification (ara-Arab)": 11.86, "MassiveScenarioClassification (jav-Latn)": 37.23, "MassiveScenarioClassification (tel-Telu)": 6.91, "MassiveScenarioClassification (tgl-Latn)": 38.16, "MassiveScenarioClassification (swa-Latn)": 35.66, "MassiveScenarioClassification (jpn-Jpan)": 10.6, "MassiveScenarioClassification (msa-Latn)": 38.97, 
"MassiveScenarioClassification (nob-Latn)": 39.05, "MassiveScenarioClassification (fin-Latn)": 35.19, "MassiveScenarioClassification (ind-Latn)": 39.54, "MassiveScenarioClassification (cym-Latn)": 39.85, "MassiveScenarioClassification (slv-Latn)": 35.98, "MassiveScenarioClassification (spa-Latn)": 37.13, "MassiveScenarioClassification (ben-Beng)": 8.85, "MassiveScenarioClassification (swe-Latn)": 36.12, "MassiveScenarioClassification (rus-Cyrl)": 62.9, "MassiveScenarioClassification (aze-Latn)": 30.32, "MassiveScenarioClassification (ita-Latn)": 42.69, "MassiveScenarioClassification (pol-Latn)": 31.62, "MassiveScenarioClassification (vie-Latn)": 31.89, "MassiveScenarioClassification (tam-Taml)": 7.01, "MassiveScenarioClassification (heb-Hebr)": 7.61, "MassiveScenarioClassification (nld-Latn)": 40.94, "MassiveScenarioClassification (khm-Khmr)": 8.51, "RuReviewsClassification (rus-Cyrl)": 60.66, "RuSciBenchGRNTIClassification (rus-Cyrl)": 52.93, "RuSciBenchOECDClassification (rus-Cyrl)": 40.79, "ToxicConversationsClassification": 57.77, "TweetSentimentExtractionClassification": 55.3 } ] }, "Clustering": { "v_measure": [ { "Model": "rubert-tiny-turbo", "ArxivClusteringP2P": 24.83, "ArxivClusteringS2S": 16.68, "BiorxivClusteringP2P": 20.0, "BiorxivClusteringS2S": 12.67, "GeoreviewClusteringP2P (rus-Cyrl)": 59.71, "MLSUMClusteringP2P (rus-Cyrl)": 40.02, "MLSUMClusteringS2S (rus-Cyrl)": 41.36, "MedrxivClusteringP2P": 20.79, "MedrxivClusteringS2S": 18.18, "RedditClustering": 26.28, "RedditClusteringP2P": 40.48, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 47.55, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.44, "StackExchangeClustering": 33.51, "StackExchangeClusteringP2P": 27.98, "TwentyNewsgroupsClustering": 19.9 } ] }, "PairClassification": { "ap": [ { "Model": "rubert-tiny-turbo", "OpusparcusPC (rus-Cyrl)": 87.58, "TERRa (rus-Cyrl)": 56.09 } ] }, "Reranking": { "map": [ { "Model": "rubert-tiny-turbo", "RuBQReranking (rus-Cyrl)": 62.15 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "rubert-tiny-turbo", "AILACasedocs": 7.43, "AILAStatutes": 13.62, "ARCChallenge": 3.85, "AlphaNLI": 14.15, "ArguAna": 32.03, "ClimateFEVER": 5.56, "DBPedia": 9.61, "RiaNewsRetrieval (rus-Cyrl)": 51.27, "RuBQRetrieval (rus-Cyrl)": 51.73 } ] }, "STS": { "spearman": [ { "Model": "rubert-tiny-turbo", "RUParaPhraserSTS (rus-Cyrl)": 72.15, "RuSTSBenchmarkSTS (rus-Cyrl)": 78.48, "STS22 (cmn-Hans)": 32.83, "STS22 (deu-Latn_fra-Latn)": 17.5, "STS22 (pol-Latn_eng-Latn)": 42.08, "STS22 (rus-Cyrl)": 60.06, "STS22 (fra-Latn)": 42.0, "STS22 (deu-Latn)": 8.16, "STS22 (tur-Latn)": 15.46, "STS22 (deu-Latn_eng-Latn)": 21.55, "STS22 (ita-Latn)": 39.69, "STS22 (pol-Latn)": 9.71, "STS22 (fra-Latn_pol-Latn)": 39.44, "STS22 (deu-Latn_pol-Latn)": 25.53, "STS22 (ara-Arab)": 27.95, "STS22 (spa-Latn_eng-Latn)": 42.77, "STS22 (spa-Latn_ita-Latn)": 32.83, "STS22 (spa-Latn)": 45.31, "STS22 (cmn-Hans_eng-Latn)": 31.25, "STS22 (en)": 47.06, "STSBenchmarkMultilingualSTS (rus-Cyrl)": 78.12 } ] }, "Summarization": { "spearman": [ { "Model": "rubert-tiny-turbo" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "rubert-tiny-turbo" } ] } }, "LLM2Vec-Sheared-Llama-unsupervised": { "BitextMining": { "f1": [ { "Model": "LLM2Vec-Sheared-Llama-unsupervised" } ] }, "Classification": { "accuracy": [ { "Model": "LLM2Vec-Sheared-Llama-unsupervised", "AmazonCounterfactualClassification (en)": 72.93, "AmazonPolarityClassification": 74.28, "AmazonReviewsClassification (en)": 36.14, "Banking77Classification": 79.0, "EmotionClassification": 42.85, 
"ImdbClassification": 71.92, "MTOPDomainClassification (en)": 91.24, "MTOPIntentClassification (en)": 74.08, "MassiveIntentClassification (en)": 69.99, "MassiveScenarioClassification (en)": 75.15, "ToxicConversationsClassification": 68.4, "TweetSentimentExtractionClassification": 56.08 } ] }, "Clustering": { "v_measure": [ { "Model": "LLM2Vec-Sheared-Llama-unsupervised", "ArxivClusteringP2P": 42.92, "ArxivClusteringS2S": 35.2, "BiorxivClusteringP2P": 35.02, "BiorxivClusteringS2S": 27.21, "MedrxivClusteringP2P": 30.15, "MedrxivClusteringS2S": 26.96, "RedditClustering": 38.67, "RedditClusteringP2P": 53.42, "StackExchangeClustering": 59.35, "StackExchangeClusteringP2P": 31.47, "TwentyNewsgroupsClustering": 31.54 } ] }, "PairClassification": { "ap": [ { "Model": "LLM2Vec-Sheared-Llama-unsupervised", "SprintDuplicateQuestions": 77.36, "TwitterSemEval2015": 61.54, "TwitterURLCorpus": 77.73 } ] }, "Reranking": { "map": [ { "Model": "LLM2Vec-Sheared-Llama-unsupervised", "AskUbuntuDupQuestions": 52.7, "MindSmallReranking": 29.52, "SciDocsRR": 67.76, "StackOverflowDupQuestions": 40.82 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "LLM2Vec-Sheared-Llama-unsupervised", "ArguAna": 43.64, "CQADupstackRetrieval": 18.5, "ClimateFEVER": 18.95, "DBPedia": 13.21, "FEVER": 16.96, "FiQA2018": 16.99, "HotpotQA": 22.64, "MSMARCO": 7.03, "NFCorpus": 15.73, "NQ": 17.96, "QuoraRetrieval": 78.23, "SCIDOCS": 5.53, "SciFact": 38.31, "TRECCOVID": 56.04, "Touche2020": 19.17 } ] }, "STS": { "spearman": [ { "Model": "LLM2Vec-Sheared-Llama-unsupervised", "BIOSSES": 75.12, "SICK-R": 69.34, "STS12": 60.09, "STS13": 72.52, "STS14": 66.7, "STS15": 77.69, "STS16": 75.94, "STS17 (en-en)": 81.67, "STS22 (en)": 63.7, "STSBenchmark": 73.36 } ] }, "Summarization": { "spearman": [ { "Model": "LLM2Vec-Sheared-Llama-unsupervised", "SummEval": 31.23 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "LLM2Vec-Sheared-Llama-unsupervised" } ] } }, "sentence-t5-xl": { "BitextMining": { "f1": [ { "Model": "sentence-t5-xl", "BUCC (de-en)": 95.04, "BUCC (fr-en)": 94.96, "BUCC (ru-en)": 8.33, "BUCC (zh-en)": 1.3, "Tatoeba (afr-eng)": 41.84, "Tatoeba (amh-eng)": 0.03, "Tatoeba (ang-eng)": 37.87, "Tatoeba (ara-eng)": 0.61, "Tatoeba (arq-eng)": 0.74, "Tatoeba (arz-eng)": 0.42, "Tatoeba (ast-eng)": 65.41, "Tatoeba (awa-eng)": 1.46, "Tatoeba (aze-eng)": 8.79, "Tatoeba (bel-eng)": 5.76, "Tatoeba (ben-eng)": 0.01, "Tatoeba (ber-eng)": 5.92, "Tatoeba (bos-eng)": 16.12, "Tatoeba (bre-eng)": 6.12, "Tatoeba (bul-eng)": 9.06, "Tatoeba (cat-eng)": 57.4, "Tatoeba (cbk-eng)": 57.68, "Tatoeba (ceb-eng)": 12.56, "Tatoeba (ces-eng)": 9.47, "Tatoeba (cha-eng)": 27.13, "Tatoeba (cmn-eng)": 1.82, "Tatoeba (cor-eng)": 3.87, "Tatoeba (csb-eng)": 14.41, "Tatoeba (cym-eng)": 6.69, "Tatoeba (dan-eng)": 54.87, "Tatoeba (deu-eng)": 93.72, "Tatoeba (dsb-eng)": 14.74, "Tatoeba (dtp-eng)": 5.84, "Tatoeba (ell-eng)": 0.6, "Tatoeba (epo-eng)": 30.8, "Tatoeba (est-eng)": 5.39, "Tatoeba (eus-eng)": 11.9, "Tatoeba (fao-eng)": 28.08, "Tatoeba (fin-eng)": 6.81, "Tatoeba (fra-eng)": 85.29, "Tatoeba (fry-eng)": 38.68, "Tatoeba (gla-eng)": 2.96, "Tatoeba (gle-eng)": 3.74, "Tatoeba (glg-eng)": 70.0, "Tatoeba (gsw-eng)": 30.49, "Tatoeba (heb-eng)": 0.87, "Tatoeba (hin-eng)": 0.1, "Tatoeba (hrv-eng)": 17.43, "Tatoeba (hsb-eng)": 14.69, "Tatoeba (hun-eng)": 7.28, "Tatoeba (hye-eng)": 0.77, "Tatoeba (ido-eng)": 46.65, "Tatoeba (ile-eng)": 59.43, "Tatoeba (ina-eng)": 82.71, "Tatoeba (ind-eng)": 37.26, "Tatoeba (isl-eng)": 11.21, "Tatoeba (ita-eng)": 79.77, "Tatoeba (jav-eng)": 7.81, 
"Tatoeba (jpn-eng)": 0.91, "Tatoeba (kab-eng)": 2.23, "Tatoeba (kat-eng)": 1.48, "Tatoeba (kaz-eng)": 1.77, "Tatoeba (khm-eng)": 0.38, "Tatoeba (kor-eng)": 1.96, "Tatoeba (kur-eng)": 12.11, "Tatoeba (kzj-eng)": 6.13, "Tatoeba (lat-eng)": 27.84, "Tatoeba (lfn-eng)": 45.89, "Tatoeba (lit-eng)": 5.94, "Tatoeba (lvs-eng)": 8.11, "Tatoeba (mal-eng)": 0.59, "Tatoeba (mar-eng)": 0.03, "Tatoeba (max-eng)": 21.7, "Tatoeba (mhr-eng)": 0.68, "Tatoeba (mkd-eng)": 5.92, "Tatoeba (mon-eng)": 2.39, "Tatoeba (nds-eng)": 45.04, "Tatoeba (nld-eng)": 64.75, "Tatoeba (nno-eng)": 36.74, "Tatoeba (nob-eng)": 54.77, "Tatoeba (nov-eng)": 57.12, "Tatoeba (oci-eng)": 34.39, "Tatoeba (orv-eng)": 2.04, "Tatoeba (pam-eng)": 8.34, "Tatoeba (pes-eng)": 0.87, "Tatoeba (pms-eng)": 38.06, "Tatoeba (pol-eng)": 28.35, "Tatoeba (por-eng)": 83.61, "Tatoeba (ron-eng)": 65.27, "Tatoeba (rus-eng)": 30.42, "Tatoeba (slk-eng)": 13.19, "Tatoeba (slv-eng)": 13.49, "Tatoeba (spa-eng)": 89.18, "Tatoeba (sqi-eng)": 14.66, "Tatoeba (srp-eng)": 13.24, "Tatoeba (swe-eng)": 60.67, "Tatoeba (swg-eng)": 34.76, "Tatoeba (swh-eng)": 8.07, "Tatoeba (tam-eng)": 0.36, "Tatoeba (tat-eng)": 1.46, "Tatoeba (tel-eng)": 0.67, "Tatoeba (tgl-eng)": 25.22, "Tatoeba (tha-eng)": 1.58, "Tatoeba (tuk-eng)": 4.99, "Tatoeba (tur-eng)": 7.72, "Tatoeba (tzl-eng)": 38.49, "Tatoeba (uig-eng)": 0.87, "Tatoeba (ukr-eng)": 9.12, "Tatoeba (urd-eng)": 0.0, "Tatoeba (uzb-eng)": 5.48, "Tatoeba (vie-eng)": 8.45, "Tatoeba (war-eng)": 13.75, "Tatoeba (wuu-eng)": 1.44, "Tatoeba (xho-eng)": 9.15, "Tatoeba (yid-eng)": 0.28, "Tatoeba (yue-eng)": 0.98, "Tatoeba (zsm-eng)": 35.71 } ] }, "Classification": { "accuracy": [ { "Model": "sentence-t5-xl", "AmazonCounterfactualClassification (de)": 67.01, "AmazonCounterfactualClassification (en)": 76.01, "AmazonCounterfactualClassification (en-ext)": 77.29, "AmazonCounterfactualClassification (ja)": 45.61, "AmazonPolarityClassification": 93.17, "AmazonReviewsClassification (de)": 44.05, "AmazonReviewsClassification (en)": 48.18, "AmazonReviewsClassification (es)": 45.01, "AmazonReviewsClassification (fr)": 43.52, "AmazonReviewsClassification (ja)": 22.23, "AmazonReviewsClassification (zh)": 21.88, "Banking77Classification": 80.88, "EmotionClassification": 51.95, "ImdbClassification": 87.54, "MTOPDomainClassification (de)": 83.28, "MTOPDomainClassification (en)": 90.73, "MTOPDomainClassification (es)": 85.32, "MTOPDomainClassification (fr)": 85.14, "MTOPDomainClassification (hi)": 20.85, "MTOPDomainClassification (th)": 15.62, "MTOPIntentClassification (de)": 54.65, "MTOPIntentClassification (en)": 68.15, "MTOPIntentClassification (es)": 57.38, "MTOPIntentClassification (fr)": 54.39, "MTOPIntentClassification (hi)": 3.28, "MTOPIntentClassification (th)": 5.08, "MasakhaNEWSClassification (fra)": 80.09, "MassiveIntentClassification (af)": 40.17, "MassiveIntentClassification (am)": 2.18, "MassiveIntentClassification (ar)": 4.18, "MassiveIntentClassification (az)": 30.02, "MassiveIntentClassification (bn)": 2.6, "MassiveIntentClassification (cy)": 29.15, "MassiveIntentClassification (da)": 47.69, "MassiveIntentClassification (de)": 57.43, "MassiveIntentClassification (el)": 9.96, "MassiveIntentClassification (en)": 72.09, "MassiveIntentClassification (es)": 57.97, "MassiveIntentClassification (fa)": 3.6, "MassiveIntentClassification (fi)": 34.02, "MassiveIntentClassification (fr)": 60.99, "MassiveIntentClassification (he)": 2.51, "MassiveIntentClassification (hi)": 3.02, "MassiveIntentClassification (hu)": 31.66, "MassiveIntentClassification 
(hy)": 3.32, "MassiveIntentClassification (id)": 41.53, "MassiveIntentClassification (is)": 30.25, "MassiveIntentClassification (it)": 56.57, "MassiveIntentClassification (ja)": 3.5, "MassiveIntentClassification (jv)": 31.67, "MassiveIntentClassification (ka)": 2.79, "MassiveIntentClassification (km)": 5.43, "MassiveIntentClassification (kn)": 2.79, "MassiveIntentClassification (ko)": 2.67, "MassiveIntentClassification (lv)": 34.25, "MassiveIntentClassification (ml)": 2.98, "MassiveIntentClassification (mn)": 20.99, "MassiveIntentClassification (ms)": 37.43, "MassiveIntentClassification (my)": 4.02, "MassiveIntentClassification (nb)": 45.91, "MassiveIntentClassification (nl)": 50.51, "MassiveIntentClassification (pl)": 43.95, "MassiveIntentClassification (pt)": 57.95, "MassiveIntentClassification (ro)": 49.37, "MassiveIntentClassification (ru)": 33.46, "MassiveIntentClassification (sl)": 36.33, "MassiveIntentClassification (sq)": 37.65, "MassiveIntentClassification (sv)": 46.35, "MassiveIntentClassification (sw)": 30.6, "MassiveIntentClassification (ta)": 1.79, "MassiveIntentClassification (te)": 2.26, "MassiveIntentClassification (th)": 4.02, "MassiveIntentClassification (tl)": 38.92, "MassiveIntentClassification (tr)": 32.05, "MassiveIntentClassification (ur)": 2.7, "MassiveIntentClassification (vi)": 21.47, "MassiveIntentClassification (zh-CN)": 0.59, "MassiveIntentClassification (zh-TW)": 3.24, "MassiveScenarioClassification (af)": 50.81, "MassiveScenarioClassification (am)": 6.95, "MassiveScenarioClassification (ar)": 12.32, "MassiveScenarioClassification (az)": 38.79, "MassiveScenarioClassification (bn)": 8.0, "MassiveScenarioClassification (cy)": 33.91, "MassiveScenarioClassification (da)": 55.79, "MassiveScenarioClassification (de)": 65.33, "MassiveScenarioClassification (el)": 16.89, "MassiveScenarioClassification (en)": 73.26, "MassiveScenarioClassification (es)": 62.52, "MassiveScenarioClassification (fa)": 6.08, "MassiveScenarioClassification (fi)": 43.34, "MassiveScenarioClassification (fr)": 66.42, "MassiveScenarioClassification (he)": 7.55, "MassiveScenarioClassification (hi)": 7.44, "MassiveScenarioClassification (hu)": 40.85, "MassiveScenarioClassification (hy)": 9.25, "MassiveScenarioClassification (id)": 51.92, "MassiveScenarioClassification (is)": 40.09, "MassiveScenarioClassification (it)": 62.94, "MassiveScenarioClassification (ja)": 7.9, "MassiveScenarioClassification (jv)": 41.33, "MassiveScenarioClassification (ka)": 7.76, "MassiveScenarioClassification (km)": 9.19, "MassiveScenarioClassification (kn)": 8.36, "MassiveScenarioClassification (ko)": 6.13, "MassiveScenarioClassification (lv)": 40.7, "MassiveScenarioClassification (ml)": 6.98, "MassiveScenarioClassification (mn)": 27.0, "MassiveScenarioClassification (ms)": 46.9, "MassiveScenarioClassification (my)": 9.55, "MassiveScenarioClassification (nb)": 53.43, "MassiveScenarioClassification (nl)": 59.65, "MassiveScenarioClassification (pl)": 49.87, "MassiveScenarioClassification (pt)": 62.18, "MassiveScenarioClassification (ro)": 58.22, "MassiveScenarioClassification (ru)": 40.73, "MassiveScenarioClassification (sl)": 43.66, "MassiveScenarioClassification (sq)": 49.25, "MassiveScenarioClassification (sv)": 57.17, "MassiveScenarioClassification (sw)": 40.55, "MassiveScenarioClassification (ta)": 7.46, "MassiveScenarioClassification (te)": 7.03, "MassiveScenarioClassification (th)": 8.52, "MassiveScenarioClassification (tl)": 51.74, "MassiveScenarioClassification (tr)": 43.01, "MassiveScenarioClassification (ur)": 
9.61, "MassiveScenarioClassification (vi)": 28.91, "MassiveScenarioClassification (zh-CN)": 5.86, "MassiveScenarioClassification (zh-TW)": 7.14, "ToxicConversationsClassification": 70.95, "TweetSentimentExtractionClassification": 61.21 } ] }, "Clustering": { "v_measure": [ { "Model": "sentence-t5-xl", "AlloProfClusteringP2P": 60.37, "AlloProfClusteringS2S": 40.76, "ArxivClusteringP2P": 41.62, "ArxivClusteringS2S": 31.17, "BiorxivClusteringP2P": 36.43, "BiorxivClusteringS2S": 26.47, "HALClusteringS2S": 20.28, "MLSUMClusteringP2P": 41.61, "MLSUMClusteringS2S": 33.6, "MasakhaNEWSClusteringP2P (fra)": 62.82, "MasakhaNEWSClusteringS2S (fra)": 31.74, "MedrxivClusteringP2P": 32.3, "MedrxivClusteringS2S": 26.93, "RedditClustering": 57.03, "RedditClusteringP2P": 62.34, "StackExchangeClustering": 67.13, "StackExchangeClusteringP2P": 34.79, "TwentyNewsgroupsClustering": 49.53 } ] }, "PairClassification": { "ap": [ { "Model": "sentence-t5-xl", "OpusparcusPC (fr)": 92.48, "PawsXPairClassification (fr)": 62.52, "SprintDuplicateQuestions": 91.44, "TwitterSemEval2015": 80.89, "TwitterURLCorpus": 85.86 } ] }, "Reranking": { "map": [ { "Model": "sentence-t5-xl", "AlloprofReranking": 63.3, "AskUbuntuDupQuestions": 62.86, "MindSmallReranking": 29.77, "SciDocsRR": 75.16, "StackOverflowDupQuestions": 51.05, "SyntecReranking": 83.07 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "sentence-t5-xl", "AlloprofRetrieval": 40.38, "ArguAna": 39.4, "BSARDRetrieval": 0.14, "CQADupstackRetrieval": 40.78, "ClimateFEVER": 10.61, "DBPedia": 33.65, "FEVER": 36.12, "FiQA2018": 44.71, "HotpotQA": 37.17, "MSMARCO": 25.17, "MintakaRetrieval (fr)": 31.54, "NFCorpus": 33.18, "NQ": 46.29, "QuoraRetrieval": 85.85, "SCIDOCS": 15.97, "SciFact": 50.91, "SyntecRetrieval": 74.24, "TRECCOVID": 54.77, "Touche2020": 22.51, "XPQARetrieval (fr)": 52.14 } ] }, "STS": { "spearman": [ { "Model": "sentence-t5-xl", "BIOSSES": 73.12, "SICK-R": 79.98, "SICKFr": 75.08, "STS12": 79.02, "STS13": 88.8, "STS14": 84.33, "STS15": 88.89, "STS16": 85.31, "STS17 (ar-ar)": 11.13, "STS17 (en-ar)": -3.93, "STS17 (en-de)": 79.04, "STS17 (en-en)": 88.91, "STS17 (en-tr)": 13.61, "STS17 (es-en)": 71.72, "STS17 (es-es)": 83.42, "STS17 (fr-en)": 71.38, "STS17 (it-en)": 69.5, "STS17 (ko-ko)": 9.61, "STS17 (nl-en)": 66.12, "STS22 (ar)": 29.6, "STS22 (de)": 47.72, "STS22 (de-en)": 49.64, "STS22 (de-fr)": 62.21, "STS22 (de-pl)": 34.34, "STS22 (en)": 64.32, "STS22 (es)": 58.16, "STS22 (es-en)": 69.15, "STS22 (es-it)": 65.26, "STS22 (fr)": 77.49, "STS22 (fr-pl)": 50.71, "STS22 (it)": 66.91, "STS22 (pl)": 27.04, "STS22 (pl-en)": 58.85, "STS22 (ru)": 26.63, "STS22 (tr)": 43.36, "STS22 (zh)": 33.55, "STS22 (zh-en)": 29.0, "STSBenchmark": 83.93, "STSBenchmarkMultilingualSTS (fr)": 79.42 } ] }, "Summarization": { "spearman": [ { "Model": "sentence-t5-xl", "SummEval": 29.91, "SummEvalFr": 31.59 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "sentence-t5-xl" } ] } }, "multilingual-e5-base": { "BitextMining": { "f1": [ { "Model": "multilingual-e5-base", "BornholmBitextMining (dan-Latn)": 33.22, "BornholmBitextMining": 46.4, "Tatoeba (kzj-Latn_eng-Latn)": 6.26, "Tatoeba (ina-Latn_eng-Latn)": 86.11, "Tatoeba (bre-Latn_eng-Latn)": 5.44, "Tatoeba (kab-Latn_eng-Latn)": 21.77, "Tatoeba (ind-Latn_eng-Latn)": 90.26, "Tatoeba (mkd-Cyrl_eng-Latn)": 73.76, "Tatoeba (yue-Hant_eng-Latn)": 80.66, "Tatoeba (amh-Ethi_eng-Latn)": 74.93, "Tatoeba (ceb-Latn_eng-Latn)": 45.46, "Tatoeba (lit-Latn_eng-Latn)": 75.53, "Tatoeba (nds-Latn_eng-Latn)": 53.86, "Tatoeba (kur-Latn_eng-Latn)": 52.96, 
"Tatoeba (bel-Cyrl_eng-Latn)": 86.7, "Tatoeba (ile-Latn_eng-Latn)": 72.56, "Tatoeba (oci-Latn_eng-Latn)": 35.79, "Tatoeba (heb-Hebr_eng-Latn)": 74.26, "Tatoeba (mhr-Cyrl_eng-Latn)": 5.52, "Tatoeba (afr-Latn_eng-Latn)": 87.04, "Tatoeba (uig-Arab_eng-Latn)": 62.97, "Tatoeba (mar-Deva_eng-Latn)": 86.62, "Tatoeba (fry-Latn_eng-Latn)": 50.82, "Tatoeba (tat-Cyrl_eng-Latn)": 66.92, "Tatoeba (khm-Khmr_eng-Latn)": 47.27, "Tatoeba (dtp-Latn_eng-Latn)": 5.13, "Tatoeba (ben-Beng_eng-Latn)": 81.05, "Tatoeba (ido-Latn_eng-Latn)": 74.41, "Tatoeba (cha-Latn_eng-Latn)": 16.95, "Tatoeba (zsm-Latn_eng-Latn)": 92.45, "Tatoeba (pes-Arab_eng-Latn)": 87.18, "Tatoeba (hye-Armn_eng-Latn)": 85.85, "Tatoeba (cat-Latn_eng-Latn)": 84.09, "Tatoeba (cym-Latn_eng-Latn)": 65.69, "Tatoeba (aze-Latn_eng-Latn)": 84.71, "Tatoeba (yid-Hebr_eng-Latn)": 63.2, "Tatoeba (swg-Latn_eng-Latn)": 42.33, "Tatoeba (war-Latn_eng-Latn)": 47.18, "Tatoeba (swe-Latn_eng-Latn)": 91.33, "Tatoeba (slk-Latn_eng-Latn)": 86.42, "Tatoeba (gla-Latn_eng-Latn)": 43.08, "Tatoeba (xho-Latn_eng-Latn)": 73.24, "Tatoeba (dan-Latn_eng-Latn)": 91.23, "Tatoeba (ara-Arab_eng-Latn)": 82.86, "Tatoeba (ast-Latn_eng-Latn)": 74.36, "Tatoeba (hrv-Latn_eng-Latn)": 92.5, "Tatoeba (nob-Latn_eng-Latn)": 95.9, "Tatoeba (eus-Latn_eng-Latn)": 56.26, "Tatoeba (kaz-Cyrl_eng-Latn)": 75.56, "Tatoeba (tuk-Latn_eng-Latn)": 19.67, "Tatoeba (pam-Latn_eng-Latn)": 6.92, "Tatoeba (gsw-Latn_eng-Latn)": 43.53, "Tatoeba (slv-Latn_eng-Latn)": 81.93, "Tatoeba (dsb-Latn_eng-Latn)": 34.36, "Tatoeba (cor-Latn_eng-Latn)": 4.38, "Tatoeba (ces-Latn_eng-Latn)": 88.75, "Tatoeba (tam-Taml_eng-Latn)": 85.12, "Tatoeba (glg-Latn_eng-Latn)": 82.69, "Tatoeba (bul-Cyrl_eng-Latn)": 88.95, "Tatoeba (deu-Latn_eng-Latn)": 97.07, "Tatoeba (fin-Latn_eng-Latn)": 86.15, "Tatoeba (csb-Latn_eng-Latn)": 24.29, "Tatoeba (urd-Arab_eng-Latn)": 86.2, "Tatoeba (est-Latn_eng-Latn)": 70.64, "Tatoeba (wuu-Hans_eng-Latn)": 78.65, "Tatoeba (tha-Thai_eng-Latn)": 94.22, "Tatoeba (spa-Latn_eng-Latn)": 96.97, "Tatoeba (ukr-Cyrl_eng-Latn)": 88.29, "Tatoeba (awa-Deva_eng-Latn)": 68.39, "Tatoeba (mal-Mlym_eng-Latn)": 96.72, "Tatoeba (cbk-Latn_eng-Latn)": 60.66, "Tatoeba (hsb-Latn_eng-Latn)": 40.36, "Tatoeba (tzl-Latn_eng-Latn)": 34.44, "Tatoeba (gle-Latn_eng-Latn)": 58.62, "Tatoeba (orv-Cyrl_eng-Latn)": 16.0, "Tatoeba (isl-Latn_eng-Latn)": 76.9, "Tatoeba (jav-Latn_eng-Latn)": 61.25, "Tatoeba (fao-Latn_eng-Latn)": 64.72, "Tatoeba (pol-Latn_eng-Latn)": 94.57, "Tatoeba (max-Deva_eng-Latn)": 52.4, "Tatoeba (bos-Latn_eng-Latn)": 88.86, "Tatoeba (hun-Latn_eng-Latn)": 84.41, "Tatoeba (rus-Cyrl_eng-Latn)": 91.78, "Tatoeba (arq-Arab_eng-Latn)": 26.61, "Tatoeba (kor-Hang_eng-Latn)": 83.37, "Tatoeba (uzb-Latn_eng-Latn)": 62.63, "Tatoeba (pms-Latn_eng-Latn)": 44.61, "Tatoeba (ell-Grek_eng-Latn)": 89.96, "Tatoeba (swh-Latn_eng-Latn)": 66.81, "Tatoeba (epo-Latn_eng-Latn)": 92.07, "Tatoeba (jpn-Jpan_eng-Latn)": 90.3, "Tatoeba (tel-Telu_eng-Latn)": 88.49, "Tatoeba (srp-Cyrl_eng-Latn)": 89.08, "Tatoeba (nov-Latn_eng-Latn)": 66.96, "Tatoeba (cmn-Hans_eng-Latn)": 93.35, "Tatoeba (tgl-Latn_eng-Latn)": 83.78, "Tatoeba (ber-Tfng_eng-Latn)": 23.59, "Tatoeba (sqi-Latn_eng-Latn)": 90.06, "Tatoeba (ang-Latn_eng-Latn)": 29.87, "Tatoeba (ita-Latn_eng-Latn)": 90.61, "Tatoeba (por-Latn_eng-Latn)": 92.74, "Tatoeba (mon-Cyrl_eng-Latn)": 78.37, "Tatoeba (fra-Latn_eng-Latn)": 92.76, "Tatoeba (lat-Latn_eng-Latn)": 39.62, "Tatoeba (nno-Latn_eng-Latn)": 82.67, "Tatoeba (arz-Arab_eng-Latn)": 66.79, "Tatoeba (hin-Deva_eng-Latn)": 93.13, "Tatoeba (nld-Latn_eng-Latn)": 
93.2, "Tatoeba (kat-Geor_eng-Latn)": 77.83, "Tatoeba (lfn-Latn_eng-Latn)": 52.85, "Tatoeba (lvs-Latn_eng-Latn)": 76.76, "Tatoeba (tur-Latn_eng-Latn)": 92.54, "Tatoeba (ron-Latn_eng-Latn)": 91.27, "Tatoeba (vie-Latn_eng-Latn)": 94.55 } ] }, "Classification": { "accuracy": [ { "Model": "multilingual-e5-base", "AllegroReviews (pol-Latn)": 40.78, "AllegroReviews": 40.85, "AmazonCounterfactualClassification (en-ext)": 76.91, "AmazonCounterfactualClassification (en)": 77.36, "AmazonCounterfactualClassification (deu-Latn)": 70.81, "AmazonCounterfactualClassification (jpn-Jpan)": 72.02, "AmazonPolarityClassification": 91.76, "AmazonReviewsClassification (en)": 47.54, "AmazonReviewsClassification (deu-Latn)": 44.37, "AmazonReviewsClassification (spa-Latn)": 43.38, "AmazonReviewsClassification (fra-Latn)": 41.55, "AmazonReviewsClassification (jpn-Jpan)": 39.57, "AmazonReviewsClassification (cmn-Hans)": 38.34, "AmazonReviewsClassification (fr)": 40.94, "AngryTweetsClassification (dan-Latn)": 56.28, "AngryTweetsClassification": 54.65, "Banking77Classification": 73.53, "CBD (pol-Latn)": 62.6, "CBD": 62.66, "DKHateClassification": 63.53, "DanishPoliticalCommentsClassification (dan-Latn)": 36.41, "DanishPoliticalCommentsClassification": 36.69, "EmotionClassification": 45.68, "GeoreviewClassification (rus-Cyrl)": 46.05, "HeadlineClassification (rus-Cyrl)": 75.64, "IFlyTek (cmn-Hans)": 40.81, "IFlyTek": 44.93, "ImdbClassification": 84.29, "InappropriatenessClassification (rus-Cyrl)": 58.78, "JDReview (cmn-Hans)": 75.72, "JDReview": 76.21, "KinopoiskClassification (rus-Cyrl)": 50.89, "LccSentimentClassification (dan-Latn)": 60.13, "LccSentimentClassification": 59.67, "MTOPDomainClassification (en)": 90.9, "MTOPDomainClassification (deu-Latn)": 87.94, "MTOPDomainClassification (spa-Latn)": 85.96, "MTOPDomainClassification (fra-Latn)": 82.88, "MTOPDomainClassification (hin-Deva)": 83.92, "MTOPDomainClassification (tha-Thai)": 83.94, "MTOPDomainClassification (fr)": 84.79, "MTOPIntentClassification (en)": 61.6, "MTOPIntentClassification (deu-Latn)": 61.05, "MTOPIntentClassification (spa-Latn)": 55.36, "MTOPIntentClassification (fra-Latn)": 52.23, "MTOPIntentClassification (hin-Deva)": 53.93, "MTOPIntentClassification (tha-Thai)": 58.69, "MTOPIntentClassification (fr)": 55.51, "MasakhaNEWSClassification (amh-Ethi)": 83.8, "MasakhaNEWSClassification (eng)": 76.49, "MasakhaNEWSClassification (fra-Latn)": 76.35, "MasakhaNEWSClassification (hau-Latn)": 74.63, "MasakhaNEWSClassification (ibo-Latn)": 64.59, "MasakhaNEWSClassification (lin-Latn)": 70.57, "MasakhaNEWSClassification (lug-Latn)": 68.12, "MasakhaNEWSClassification (orm-Ethi)": 71.75, "MasakhaNEWSClassification (pcm-Latn)": 91.05, "MasakhaNEWSClassification (run-Latn)": 73.35, "MasakhaNEWSClassification (sna-Latn)": 84.17, "MasakhaNEWSClassification (som-Latn)": 60.1, "MasakhaNEWSClassification (swa-Latn)": 70.74, "MasakhaNEWSClassification (tir-Ethi)": 67.1, "MasakhaNEWSClassification (xho-Latn)": 76.03, "MasakhaNEWSClassification (yor-Latn)": 72.75, "MasakhaNEWSClassification (fra)": 79.69, "MassiveIntentClassification (tha-Thai)": 59.63, "MassiveIntentClassification (tam-Taml)": 48.93, "MassiveIntentClassification (fin-Latn)": 58.91, "MassiveIntentClassification (rus-Cyrl)": 62.78, "MassiveIntentClassification (afr-Latn)": 49.82, "MassiveIntentClassification (heb-Hebr)": 55.3, "MassiveIntentClassification (sqi-Latn)": 51.07, "MassiveIntentClassification (por-Latn)": 62.12, "MassiveIntentClassification (hye-Armn)": 48.77, "MassiveIntentClassification 
(cym-Latn)": 37.05, "MassiveIntentClassification (deu-Latn)": 59.82, "MassiveIntentClassification (fas-Arab)": 59.51, "MassiveIntentClassification (hun-Latn)": 57.69, "MassiveIntentClassification (urd-Arab)": 51.3, "MassiveIntentClassification (cmo-Hant)": 56.4, "MassiveIntentClassification (khm-Khmr)": 32.14, "MassiveIntentClassification (tel-Telu)": 50.09, "MassiveIntentClassification (vie-Latn)": 59.61, "MassiveIntentClassification (kan-Knda)": 48.63, "MassiveIntentClassification (ara-Arab)": 50.2, "MassiveIntentClassification (mya-Mymr)": 46.67, "MassiveIntentClassification (slv-Latn)": 53.84, "MassiveIntentClassification (jpn-Jpan)": 62.3, "MassiveIntentClassification (mon-Cyrl)": 46.8, "MassiveIntentClassification (jav-Latn)": 43.23, "MassiveIntentClassification (lav-Latn)": 51.17, "MassiveIntentClassification (ron-Latn)": 56.83, "MassiveIntentClassification (dan-Latn)": 60.69, "MassiveIntentClassification (nob-Latn)": 60.06, "MassiveIntentClassification (tgl-Latn)": 48.99, "MassiveIntentClassification (aze-Latn)": 51.36, "MassiveIntentClassification (ind-Latn)": 58.7, "MassiveIntentClassification (amh-Ethi)": 42.4, "MassiveIntentClassification (ben-Beng)": 51.69, "MassiveIntentClassification (ell-Grek)": 58.07, "MassiveIntentClassification (hin-Deva)": 56.75, "MassiveIntentClassification (nld-Latn)": 61.23, "MassiveIntentClassification (pol-Latn)": 60.98, "MassiveIntentClassification (swe-Latn)": 62.43, "MassiveIntentClassification (isl-Latn)": 44.52, "MassiveIntentClassification (mal-Mlym)": 53.75, "MassiveIntentClassification (msa-Latn)": 52.84, "MassiveIntentClassification (kat-Geor)": 37.56, "MassiveIntentClassification (tur-Latn)": 60.69, "MassiveIntentClassification (kor-Kore)": 59.97, "MassiveIntentClassification (ita-Latn)": 61.29, "MassiveIntentClassification (cmo-Hans)": 63.22, "MassiveIntentClassification (en)": 65.71, "MassiveIntentClassification (fra-Latn)": 61.32, "MassiveIntentClassification (swa-Latn)": 45.24, "MassiveIntentClassification (spa-Latn)": 61.13, "MassiveIntentClassification (da)": 60.16, "MassiveIntentClassification (nb)": 59.83, "MassiveIntentClassification (sv)": 61.78, "MassiveIntentClassification (pl)": 61.04, "MassiveScenarioClassification (ind-Latn)": 63.6, "MassiveScenarioClassification (tha-Thai)": 67.37, "MassiveScenarioClassification (cmo-Hans)": 70.24, "MassiveScenarioClassification (ben-Beng)": 57.0, "MassiveScenarioClassification (kan-Knda)": 53.49, "MassiveScenarioClassification (tel-Telu)": 54.24, "MassiveScenarioClassification (aze-Latn)": 55.15, "MassiveScenarioClassification (ell-Grek)": 65.38, "MassiveScenarioClassification (swa-Latn)": 52.64, "MassiveScenarioClassification (hin-Deva)": 62.91, "MassiveScenarioClassification (tur-Latn)": 65.18, "MassiveScenarioClassification (dan-Latn)": 67.97, "MassiveScenarioClassification (msa-Latn)": 58.35, "MassiveScenarioClassification (mya-Mymr)": 50.77, "MassiveScenarioClassification (mon-Cyrl)": 51.87, "MassiveScenarioClassification (tgl-Latn)": 54.36, "MassiveScenarioClassification (cmo-Hant)": 63.73, "MassiveScenarioClassification (ara-Arab)": 58.0, "MassiveScenarioClassification (slv-Latn)": 58.3, "MassiveScenarioClassification (spa-Latn)": 66.47, "MassiveScenarioClassification (urd-Arab)": 56.74, "MassiveScenarioClassification (fin-Latn)": 64.94, "MassiveScenarioClassification (tam-Taml)": 53.86, "MassiveScenarioClassification (ron-Latn)": 63.5, "MassiveScenarioClassification (hye-Armn)": 53.63, "MassiveScenarioClassification (vie-Latn)": 66.35, "MassiveScenarioClassification (deu-Latn)": 
68.4, "MassiveScenarioClassification (afr-Latn)": 58.95, "MassiveScenarioClassification (en)": 71.57, "MassiveScenarioClassification (fra-Latn)": 67.37, "MassiveScenarioClassification (jpn-Jpan)": 69.89, "MassiveScenarioClassification (nld-Latn)": 68.62, "MassiveScenarioClassification (cym-Latn)": 43.84, "MassiveScenarioClassification (heb-Hebr)": 62.53, "MassiveScenarioClassification (pol-Latn)": 66.12, "MassiveScenarioClassification (fas-Arab)": 63.92, "MassiveScenarioClassification (lav-Latn)": 56.42, "MassiveScenarioClassification (por-Latn)": 65.49, "MassiveScenarioClassification (rus-Cyrl)": 68.21, "MassiveScenarioClassification (mal-Mlym)": 59.89, "MassiveScenarioClassification (hun-Latn)": 65.75, "MassiveScenarioClassification (nob-Latn)": 66.57, "MassiveScenarioClassification (kor-Kore)": 67.9, "MassiveScenarioClassification (isl-Latn)": 53.28, "MassiveScenarioClassification (khm-Khmr)": 38.45, "MassiveScenarioClassification (sqi-Latn)": 57.92, "MassiveScenarioClassification (jav-Latn)": 51.94, "MassiveScenarioClassification (amh-Ethi)": 50.33, "MassiveScenarioClassification (ita-Latn)": 66.17, "MassiveScenarioClassification (kat-Geor)": 43.38, "MassiveScenarioClassification (swe-Latn)": 69.35, "MassiveScenarioClassification (da)": 67.46, "MassiveScenarioClassification (nb)": 66.18, "MassiveScenarioClassification (sv)": 69.15, "MassiveScenarioClassification (pl)": 66.11, "MultilingualSentiment (cmn-Hans)": 67.56, "MultilingualSentiment": 65.28, "NoRecClassification (nob-Latn)": 53.74, "NoRecClassification": 57.58, "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 75.85, "NordicLangClassification": 75.94, "NorwegianParliament": 59.94, "OnlineShopping (cmn-Hans)": 88.66, "OnlineShopping": 88.4, "PAC (pol-Latn)": 70.87, "PAC": 70.87, "PolEmo2.0-IN (pol-Latn)": 67.59, "PolEmo2.0-IN": 67.66, "PolEmo2.0-OUT (pol-Latn)": 43.93, "PolEmo2.0-OUT": 43.91, "RuReviewsClassification (rus-Cyrl)": 62.99, "RuSciBenchGRNTIClassification (rus-Cyrl)": 56.28, "RuSciBenchOECDClassification (rus-Cyrl)": 42.69, "ScalaDaClassification": 50.79, "ScalaNbClassification": 50.32, "TNews (cmn-Hans)": 47.52, "TNews": 47.06, "ToxicConversationsClassification": 64.33, "TweetSentimentExtractionClassification": 62.8, "Waimai (cmn-Hans)": 85.98, "Waimai": 84.42 } ] }, "Clustering": { "v_measure": [ { "Model": "multilingual-e5-base", "8TagsClustering": 24.97, "AlloProfClusteringP2P": 62.09, "AlloProfClusteringS2S": 32.98, "ArxivClusteringP2P": 43.35, "ArxivClusteringS2S": 36.0, "BiorxivClusteringP2P": 37.55, "BiorxivClusteringS2S": 30.33, "CLSClusteringP2P": 32.41, "CLSClusteringS2S": 36.99, "GeoreviewClusteringP2P (rus-Cyrl)": 54.46, "HALClusteringS2S": 22.48, "MLSUMClusteringP2P (rus-Cyrl)": 43.47, "MLSUMClusteringP2P": 43.48, "MLSUMClusteringS2S (rus-Cyrl)": 40.87, "MLSUMClusteringS2S": 38.53, "MasakhaNEWSClusteringP2P (amh-Ethi)": 58.05, "MasakhaNEWSClusteringP2P (eng)": 43.8, "MasakhaNEWSClusteringP2P (fra-Latn)": 58.28, "MasakhaNEWSClusteringP2P (hau-Latn)": 44.78, "MasakhaNEWSClusteringP2P (ibo-Latn)": 44.97, "MasakhaNEWSClusteringP2P (lin-Latn)": 48.08, "MasakhaNEWSClusteringP2P (lug-Latn)": 50.15, "MasakhaNEWSClusteringP2P (orm-Ethi)": 38.02, "MasakhaNEWSClusteringP2P (pcm-Latn)": 71.03, "MasakhaNEWSClusteringP2P (run-Latn)": 58.28, "MasakhaNEWSClusteringP2P (sna-Latn)": 59.25, "MasakhaNEWSClusteringP2P (som-Latn)": 37.27, "MasakhaNEWSClusteringP2P (swa-Latn)": 34.54, "MasakhaNEWSClusteringP2P (tir-Ethi)": 53.44, "MasakhaNEWSClusteringP2P (xho-Latn)": 40.32, 
"MasakhaNEWSClusteringP2P (yor-Latn)": 37.97, "MasakhaNEWSClusteringP2P (fra)": 47.91, "MasakhaNEWSClusteringS2S (amh-Ethi)": 49.38, "MasakhaNEWSClusteringS2S (eng)": 45.76, "MasakhaNEWSClusteringS2S (fra-Latn)": 55.43, "MasakhaNEWSClusteringS2S (hau-Latn)": 16.11, "MasakhaNEWSClusteringS2S (ibo-Latn)": 24.38, "MasakhaNEWSClusteringS2S (lin-Latn)": 44.8, "MasakhaNEWSClusteringS2S (lug-Latn)": 45.67, "MasakhaNEWSClusteringS2S (orm-Ethi)": 26.41, "MasakhaNEWSClusteringS2S (pcm-Latn)": 83.26, "MasakhaNEWSClusteringS2S (run-Latn)": 48.77, "MasakhaNEWSClusteringS2S (sna-Latn)": 43.9, "MasakhaNEWSClusteringS2S (som-Latn)": 25.43, "MasakhaNEWSClusteringS2S (swa-Latn)": 9.87, "MasakhaNEWSClusteringS2S (tir-Ethi)": 51.66, "MasakhaNEWSClusteringS2S (xho-Latn)": 29.65, "MasakhaNEWSClusteringS2S (yor-Latn)": 30.12, "MasakhaNEWSClusteringS2S (fra)": 51.16, "MedrxivClusteringP2P": 30.6, "MedrxivClusteringS2S": 28.73, "RedditClustering": 43.15, "RedditClusteringP2P": 61.69, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 51.56, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 44.78, "StackExchangeClustering": 55.31, "StackExchangeClusteringP2P": 33.51, "ThuNewsClusteringP2P": 40.98, "ThuNewsClusteringS2S": 52.36, "TwentyNewsgroupsClustering": 35.55 } ] }, "PairClassification": { "ap": [ { "Model": "multilingual-e5-base", "CDSC-E (pol-Latn)": 72.7, "CDSC-E": 72.67, "Cmnli": 74.51, "Ocnli": 59.63, "OpusparcusPC (deu-Latn)": 95.83, "OpusparcusPC (en)": 98.71, "OpusparcusPC (fin-Latn)": 90.3, "OpusparcusPC (fra-Latn)": 92.12, "OpusparcusPC (rus-Cyrl)": 86.82, "OpusparcusPC (swe-Latn)": 93.05, "OpusparcusPC (fr)": 92.72, "PPC": 88.01, "PSC (pol-Latn)": 99.14, "PSC": 99.14, "PawsXPairClassification (deu-Latn)": 54.11, "PawsXPairClassification (en)": 55.79, "PawsXPairClassification (spa-Latn)": 54.13, "PawsXPairClassification (fra-Latn)": 56.01, "PawsXPairClassification (jpn-Hira)": 49.02, "PawsXPairClassification (kor-Hang)": 51.01, "PawsXPairClassification (cmn-Hans)": 55.13, "PawsXPairClassification (fr)": 56.93, "SICK-E-PL (pol-Latn)": 68.76, "SICK-E-PL": 68.77, "SprintDuplicateQuestions": 93.02, "TERRa (rus-Cyrl)": 54.96, "TwitterSemEval2015": 72.21, "TwitterURLCorpus": 85.48 } ] }, "Reranking": { "map": [ { "Model": "multilingual-e5-base", "AlloprofReranking (fra-Latn)": 65.9, "AlloprofReranking": 58.1, "AskUbuntuDupQuestions": 59.28, "CMedQAv1": 65.21, "CMedQAv2": 66.06, "MMarcoReranking (cmn-Hans)": 30.52, "MMarcoReranking": 21.76, "MindSmallReranking": 29.28, "RuBQReranking (rus-Cyrl)": 72.01, "SciDocsRR": 81.81, "StackOverflowDupQuestions": 49.75, "SyntecReranking (fra-Latn)": 85.31, "SyntecReranking": 85.43, "T2Reranking (cmn-Hans)": 64.86, "T2Reranking": 64.39 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "multilingual-e5-base", "AILACasedocs": 26.05, "AILAStatutes": 20.37, "ARCChallenge": 9.61, "AlloprofRetrieval (fra-Latn)": 34.45, "AlloprofRetrieval": 36.21, "AlphaNLI": 16.44, "ArguAna": 44.21, "ArguAna-PL (pol-Latn)": 42.86, "ArguAna-PL": 42.81, "BSARDRetrieval (fra-Latn)": 18.83, "BSARDRetrieval": 0.0, "CmedqaRetrieval (cmn-Hans)": 27.2, "CmedqaRetrieval": 27.2, "CovidRetrieval (cmn-Hans)": 73.48, "CovidRetrieval": 73.45, "DBPedia-PL": 30.23, "DuRetrieval (cmn-Hans)": 81.66, "DuRetrieval": 81.64, "EcomRetrieval (cmn-Hans)": 54.01, "EcomRetrieval": 54.17, "FiQA-PL (pol-Latn)": 25.59, "FiQA-PL": 25.52, "FiQA2018": 38.15, "GerDaLIRSmall (deu-Latn)": 15.3, "HellaSwag": 24.79, "HotpotQA-PL": 63.52, "LEMBNarrativeQARetrieval": 23.6, "LEMBNeedleRetrieval": 32.0, "LEMBPasskeyRetrieval": 38.25, 
"LEMBQMSumRetrieval": 25.16, "LEMBSummScreenFDRetrieval": 68.21, "LEMBWikimQARetrieval": 56.04, "LeCaRDv2 (zho-Hans)": 59.0, "LegalBenchConsumerContractsQA": 69.02, "LegalBenchCorporateLobbying": 88.97, "LegalQuAD (deu-Latn)": 47.85, "LegalSummarization": 61.69, "MMarcoRetrieval (cmn-Hans)": 76.01, "MMarcoRetrieval": 76.04, "MSMARCO-PL": 29.52, "MedicalRetrieval (cmn-Hans)": 48.33, "MedicalRetrieval": 48.35, "MintakaRetrieval (ara-Arab)": 23.06, "MintakaRetrieval (deu-Latn)": 29.8, "MintakaRetrieval (spa-Latn)": 29.88, "MintakaRetrieval (fra-Latn)": 30.96, "MintakaRetrieval (hin-Deva)": 22.68, "MintakaRetrieval (ita-Latn)": 29.77, "MintakaRetrieval (jpn-Hira)": 22.98, "MintakaRetrieval (por-Latn)": 30.62, "MintakaRetrieval (fr)": 23.46, "NFCorpus": 32.49, "NFCorpus-PL (pol-Latn)": 25.99, "NFCorpus-PL": 25.98, "NQ-PL": 44.8, "PIQA": 25.09, "Quail": 3.52, "Quora-PL": 81.22, "RARbCode": 52.16, "RARbMath": 65.35, "RiaNewsRetrieval (rus-Cyrl)": 70.24, "RuBQRetrieval (rus-Cyrl)": 69.58, "SCIDOCS": 17.17, "SCIDOCS-PL (pol-Latn)": 12.36, "SCIDOCS-PL": 12.35, "SIQA": 3.72, "SciFact": 69.39, "SciFact-PL (pol-Latn)": 62.26, "SciFact-PL": 62.11, "SpartQA": 7.91, "SyntecRetrieval (fra-Latn)": 82.86, "SyntecRetrieval": 80.49, "T2Retrieval (cmn-Hans)": 70.77, "T2Retrieval": 70.86, "TRECCOVID": 69.5, "TRECCOVID-PL (pol-Latn)": 65.94, "TRECCOVID-PL": 66.06, "TempReasonL1": 0.72, "TempReasonL2Fact": 38.76, "TempReasonL2Pure": 1.63, "TempReasonL3Fact": 35.85, "TempReasonL3Pure": 7.11, "Touche2020": 21.5, "VideoRetrieval (cmn-Hans)": 61.26, "VideoRetrieval": 61.3, "WinoGrande": 56.18, "XPQARetrieval (ara-Arab_ara-Arab)": 39.97, "XPQARetrieval (eng-Latn_ara-Arab)": 17.23, "XPQARetrieval (ara-Arab_eng-Latn)": 34.35, "XPQARetrieval (deu-Latn_deu-Latn)": 72.11, "XPQARetrieval (eng-Latn_deu-Latn)": 28.91, "XPQARetrieval (deu-Latn_eng-Latn)": 61.46, "XPQARetrieval (spa-Latn_spa-Latn)": 58.35, "XPQARetrieval (eng-Latn_spa-Latn)": 25.27, "XPQARetrieval (spa-Latn_eng-Latn)": 51.07, "XPQARetrieval (fra-Latn_fra-Latn)": 59.56, "XPQARetrieval (eng-Latn_fra-Latn)": 23.69, "XPQARetrieval (fra-Latn_eng-Latn)": 53.9, "XPQARetrieval (hin-Deva_hin-Deva)": 70.56, "XPQARetrieval (eng-Latn_hin-Deva)": 27.57, "XPQARetrieval (hin-Deva_eng-Latn)": 63.68, "XPQARetrieval (ita-Latn_ita-Latn)": 70.38, "XPQARetrieval (eng-Latn_ita-Latn)": 26.06, "XPQARetrieval (ita-Latn_eng-Latn)": 56.2, "XPQARetrieval (jpn-Hira_jpn-Hira)": 71.97, "XPQARetrieval (eng-Latn_jpn-Hira)": 17.63, "XPQARetrieval (jpn-Hira_eng-Latn)": 61.03, "XPQARetrieval (kor-Hang_kor-Hang)": 36.12, "XPQARetrieval (eng-Latn_kor-Hang)": 20.27, "XPQARetrieval (kor-Hang_eng-Latn)": 29.26, "XPQARetrieval (pol-Latn_pol-Latn)": 48.1, "XPQARetrieval (eng-Latn_pol-Latn)": 19.48, "XPQARetrieval (pol-Latn_eng-Latn)": 40.18, "XPQARetrieval (por-Latn_por-Latn)": 44.76, "XPQARetrieval (eng-Latn_por-Latn)": 17.66, "XPQARetrieval (por-Latn_eng-Latn)": 40.52, "XPQARetrieval (tam-Taml_tam-Taml)": 35.25, "XPQARetrieval (eng-Latn_tam-Taml)": 12.64, "XPQARetrieval (tam-Taml_eng-Latn)": 26.73, "XPQARetrieval (cmn-Hans_cmn-Hans)": 67.06, "XPQARetrieval (eng-Latn_cmn-Hans)": 12.72, "XPQARetrieval (cmn-Hans_eng-Latn)": 53.53, "XPQARetrieval (fr)": 65.81 } ] }, "STS": { "spearman": [ { "Model": "multilingual-e5-base", "AFQMC (cmn-Hans)": 29.66, "AFQMC": 29.67, "ATEC (cmn-Hans)": 37.01, "ATEC": 37.01, "BIOSSES": 85.05, "BQ (cmn-Hans)": 45.45, "BQ": 45.45, "CDSC-R (pol-Latn)": 90.09, "CDSC-R": 90.08, "LCQMC (cmn-Hans)": 74.15, "LCQMC": 74.15, "PAWSX (cmn-Hans)": 12.13, "PAWSX": 12.14, "QBQTC": 28.81, 
"RUParaPhraserSTS (rus-Cyrl)": 70.17, "RuSTSBenchmarkSTS (rus-Cyrl)": 79.64, "SICK-R": 78.51, "SICK-R-PL (pol-Latn)": 71.23, "SICK-R-PL": 71.23, "SICKFr (fra-Latn)": 75.76, "SICKFr": 76.23, "STS12": 76.7, "STS13": 78.02, "STS14": 76.6, "STS15": 88.16, "STS16": 84.28, "STS17 (fra-Latn_eng-Latn)": 80.18, "STS17 (ita-Latn_eng-Latn)": 80.16, "STS17 (eng-Latn_ara-Arab)": 71.27, "STS17 (kor-Hang)": 79.95, "STS17 (eng-Latn_tur-Latn)": 63.3, "STS17 (spa-Latn_eng-Latn)": 76.56, "STS17 (spa-Latn)": 86.74, "STS17 (en-en)": 87.84, "STS17 (ara-Arab)": 74.48, "STS17 (nld-Latn_eng-Latn)": 79.29, "STS17 (eng-Latn_deu-Latn)": 82.08, "STS22 (fra-Latn)": 75.04, "STS22 (ara-Arab)": 57.82, "STS22 (en)": 62.26, "STS22 (spa-Latn)": 66.67, "STS22 (fra-Latn_pol-Latn)": 73.25, "STS22 (ita-Latn)": 77.76, "STS22 (pol-Latn_eng-Latn)": 70.37, "STS22 (tur-Latn)": 63.71, "STS22 (rus-Cyrl)": 60.67, "STS22 (deu-Latn)": 55.95, "STS22 (deu-Latn_fra-Latn)": 59.68, "STS22 (spa-Latn_eng-Latn)": 74.0, "STS22 (cmn-Hans_eng-Latn)": 69.8, "STS22 (pol-Latn)": 34.08, "STS22 (spa-Latn_ita-Latn)": 66.43, "STS22 (cmn-Hans)": 65.63, "STS22 (deu-Latn_pol-Latn)": 39.35, "STS22 (deu-Latn_eng-Latn)": 54.89, "STS22 (zh)": 65.64, "STS22 (pl)": 34.07, "STSB (cmn-Hans)": 79.04, "STSB": 79.05, "STSBenchmark": 85.64, "STSBenchmarkMultilingualSTS (rus-Cyrl)": 79.33, "STSBenchmarkMultilingualSTS (pol-Latn)": 74.93, "STSBenchmarkMultilingualSTS (spa-Latn)": 81.75, "STSBenchmarkMultilingualSTS (en)": 85.64, "STSBenchmarkMultilingualSTS (cmn-Hans)": 79.87, "STSBenchmarkMultilingualSTS (fra-Latn)": 80.85, "STSBenchmarkMultilingualSTS (deu-Latn)": 79.68, "STSBenchmarkMultilingualSTS (nld-Latn)": 75.96, "STSBenchmarkMultilingualSTS (por-Latn)": 67.16, "STSBenchmarkMultilingualSTS (ita-Latn)": 78.09, "STSBenchmarkMultilingualSTS (fr)": 80.62 } ] }, "Summarization": { "spearman": [ { "Model": "multilingual-e5-base", "SummEval": 30.23, "SummEvalFr (fra-Latn)": 32.96, "SummEvalFr": 30.76 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "multilingual-e5-base" } ] } }, "norbert3-base": { "BitextMining": { "f1": [ { "Model": "norbert3-base", "BornholmBitextMining": 6.08 } ] }, "Classification": { "accuracy": [ { "Model": "norbert3-base", "AngryTweetsClassification": 52.48, "DKHateClassification": 58.78, "DanishPoliticalCommentsClassification": 34.14, "LccSentimentClassification": 54.07, "MassiveIntentClassification (da)": 53.16, "MassiveIntentClassification (nb)": 54.2, "MassiveIntentClassification (sv)": 52.08, "MassiveScenarioClassification (da)": 57.17, "MassiveScenarioClassification (nb)": 60.69, "MassiveScenarioClassification (sv)": 53.53, "NoRecClassification": 53.4, "NordicLangClassification": 82.67, "NorwegianParliament": 59.33, "ScalaDaClassification": 58.25, "ScalaNbClassification": 60.19 } ] }, "Clustering": { "v_measure": [ { "Model": "norbert3-base" } ] }, "PairClassification": { "ap": [ { "Model": "norbert3-base" } ] }, "Reranking": { "map": [ { "Model": "norbert3-base" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "norbert3-base" } ] }, "STS": { "spearman": [ { "Model": "norbert3-base" } ] }, "Summarization": { "spearman": [ { "Model": "norbert3-base" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "norbert3-base" } ] } }, "voyage-code-2": { "BitextMining": { "f1": [ { "Model": "voyage-code-2" } ] }, "Classification": { "accuracy": [ { "Model": "voyage-code-2", "AmazonReviewsClassification (fr)": 42.15, "MTOPDomainClassification (fr)": 87.68, "MTOPIntentClassification (fr)": 59.44, "MasakhaNEWSClassification (fra)": 82.13, 
"MassiveIntentClassification (fr)": 63.08, "MassiveScenarioClassification (fr)": 70.15 } ] }, "Clustering": { "v_measure": [ { "Model": "voyage-code-2", "AlloProfClusteringP2P": 61.63, "AlloProfClusteringS2S": 50.67, "HALClusteringS2S": 27.44, "MLSUMClusteringP2P": 45.23, "MLSUMClusteringS2S": 41.48, "MasakhaNEWSClusteringP2P (fra)": 56.59, "MasakhaNEWSClusteringS2S (fra)": 35.18 } ] }, "PairClassification": { "ap": [ { "Model": "voyage-code-2", "OpusparcusPC (fr)": 92.87, "PawsXPairClassification (fr)": 60.83 } ] }, "Reranking": { "map": [ { "Model": "voyage-code-2", "AlloprofReranking": 70.79, "SyntecReranking": 86.77 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "voyage-code-2", "AlloprofRetrieval": 52.61, "BSARDRetrieval": 0.29, "MintakaRetrieval (fr)": 19.05, "SyntecRetrieval": 82.77, "XPQARetrieval (fr)": 71.95 } ] }, "STS": { "spearman": [ { "Model": "voyage-code-2", "SICKFr": 73.56, "STS22 (fr)": 79.99, "STSBenchmarkMultilingualSTS (fr)": 79.02 } ] }, "Summarization": { "spearman": [ { "Model": "voyage-code-2", "SummEvalFr": 28.34 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "voyage-code-2" } ] } }, "mistral-7b-instruct-v0.2": { "BitextMining": { "f1": [ { "Model": "mistral-7b-instruct-v0.2" } ] }, "Classification": { "accuracy": [ { "Model": "mistral-7b-instruct-v0.2" } ] }, "Clustering": { "v_measure": [ { "Model": "mistral-7b-instruct-v0.2" } ] }, "PairClassification": { "ap": [ { "Model": "mistral-7b-instruct-v0.2" } ] }, "Reranking": { "map": [ { "Model": "mistral-7b-instruct-v0.2" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "mistral-7b-instruct-v0.2" } ] }, "STS": { "spearman": [ { "Model": "mistral-7b-instruct-v0.2" } ] }, "Summarization": { "spearman": [ { "Model": "mistral-7b-instruct-v0.2" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "mistral-7b-instruct-v0.2", "Core17InstructionRetrieval": 13.03, "News21InstructionRetrieval": 4.81, "Robust04InstructionRetrieval": 12.61 } ] } }, "sentence-t5-large": { "BitextMining": { "f1": [ { "Model": "sentence-t5-large", "BUCC (de-en)": 87.0, "BUCC (fr-en)": 88.91, "BUCC (ru-en)": 0.44, "BUCC (zh-en)": 0.95, "Tatoeba (afr-eng)": 23.7, "Tatoeba (amh-eng)": 0.65, "Tatoeba (ang-eng)": 30.98, "Tatoeba (ara-eng)": 0.48, "Tatoeba (arq-eng)": 0.68, "Tatoeba (arz-eng)": 0.22, "Tatoeba (ast-eng)": 55.3, "Tatoeba (awa-eng)": 1.03, "Tatoeba (aze-eng)": 5.83, "Tatoeba (bel-eng)": 1.66, "Tatoeba (ben-eng)": 0.0, "Tatoeba (ber-eng)": 5.62, "Tatoeba (bos-eng)": 12.23, "Tatoeba (bre-eng)": 5.84, "Tatoeba (bul-eng)": 1.35, "Tatoeba (cat-eng)": 48.56, "Tatoeba (cbk-eng)": 46.97, "Tatoeba (ceb-eng)": 9.79, "Tatoeba (ces-eng)": 6.0, "Tatoeba (cha-eng)": 24.21, "Tatoeba (cmn-eng)": 2.26, "Tatoeba (cor-eng)": 4.03, "Tatoeba (csb-eng)": 9.53, "Tatoeba (cym-eng)": 9.17, "Tatoeba (dan-eng)": 34.63, "Tatoeba (deu-eng)": 89.31, "Tatoeba (dsb-eng)": 9.68, "Tatoeba (dtp-eng)": 4.66, "Tatoeba (ell-eng)": 0.77, "Tatoeba (epo-eng)": 26.88, "Tatoeba (est-eng)": 5.19, "Tatoeba (eus-eng)": 9.46, "Tatoeba (fao-eng)": 21.59, "Tatoeba (fin-eng)": 5.66, "Tatoeba (fra-eng)": 79.71, "Tatoeba (fry-eng)": 28.29, "Tatoeba (gla-eng)": 2.34, "Tatoeba (gle-eng)": 3.55, "Tatoeba (glg-eng)": 56.25, "Tatoeba (gsw-eng)": 24.25, "Tatoeba (heb-eng)": 0.57, "Tatoeba (hin-eng)": 0.12, "Tatoeba (hrv-eng)": 10.29, "Tatoeba (hsb-eng)": 9.52, "Tatoeba (hun-eng)": 6.22, "Tatoeba (hye-eng)": 0.81, "Tatoeba (ido-eng)": 41.11, "Tatoeba (ile-eng)": 54.0, "Tatoeba (ina-eng)": 75.47, "Tatoeba (ind-eng)": 13.02, "Tatoeba (isl-eng)": 8.98, "Tatoeba (ita-eng)": 67.23, 
"Tatoeba (jav-eng)": 8.54, "Tatoeba (jpn-eng)": 0.99, "Tatoeba (kab-eng)": 1.85, "Tatoeba (kat-eng)": 1.37, "Tatoeba (kaz-eng)": 0.67, "Tatoeba (khm-eng)": 0.56, "Tatoeba (kor-eng)": 1.73, "Tatoeba (kur-eng)": 9.23, "Tatoeba (kzj-eng)": 5.38, "Tatoeba (lat-eng)": 21.3, "Tatoeba (lfn-eng)": 40.48, "Tatoeba (lit-eng)": 5.38, "Tatoeba (lvs-eng)": 6.83, "Tatoeba (mal-eng)": 0.45, "Tatoeba (mar-eng)": 0.01, "Tatoeba (max-eng)": 16.44, "Tatoeba (mhr-eng)": 0.33, "Tatoeba (mkd-eng)": 0.4, "Tatoeba (mon-eng)": 2.48, "Tatoeba (nds-eng)": 34.66, "Tatoeba (nld-eng)": 42.72, "Tatoeba (nno-eng)": 24.08, "Tatoeba (nob-eng)": 34.17, "Tatoeba (nov-eng)": 55.01, "Tatoeba (oci-eng)": 29.15, "Tatoeba (orv-eng)": 0.2, "Tatoeba (pam-eng)": 6.99, "Tatoeba (pes-eng)": 0.9, "Tatoeba (pms-eng)": 30.8, "Tatoeba (pol-eng)": 12.81, "Tatoeba (por-eng)": 73.45, "Tatoeba (ron-eng)": 54.86, "Tatoeba (rus-eng)": 2.43, "Tatoeba (slk-eng)": 8.35, "Tatoeba (slv-eng)": 9.3, "Tatoeba (spa-eng)": 78.87, "Tatoeba (sqi-eng)": 11.74, "Tatoeba (srp-eng)": 5.83, "Tatoeba (swe-eng)": 35.41, "Tatoeba (swg-eng)": 28.18, "Tatoeba (swh-eng)": 7.53, "Tatoeba (tam-eng)": 0.36, "Tatoeba (tat-eng)": 1.01, "Tatoeba (tel-eng)": 1.1, "Tatoeba (tgl-eng)": 12.4, "Tatoeba (tha-eng)": 1.58, "Tatoeba (tuk-eng)": 4.95, "Tatoeba (tur-eng)": 6.45, "Tatoeba (tzl-eng)": 37.82, "Tatoeba (uig-eng)": 0.67, "Tatoeba (ukr-eng)": 1.88, "Tatoeba (urd-eng)": 0.0, "Tatoeba (uzb-eng)": 4.79, "Tatoeba (vie-eng)": 7.03, "Tatoeba (war-eng)": 9.68, "Tatoeba (wuu-eng)": 1.28, "Tatoeba (xho-eng)": 10.64, "Tatoeba (yid-eng)": 0.57, "Tatoeba (yue-eng)": 0.88, "Tatoeba (zsm-eng)": 14.67 } ] }, "Classification": { "accuracy": [ { "Model": "sentence-t5-large", "AmazonCounterfactualClassification (de)": 67.97, "AmazonCounterfactualClassification (en)": 75.51, "AmazonCounterfactualClassification (en-ext)": 75.44, "AmazonCounterfactualClassification (ja)": 45.72, "AmazonPolarityClassification": 92.87, "AmazonReviewsClassification (de)": 43.16, "AmazonReviewsClassification (en)": 47.12, "AmazonReviewsClassification (es)": 42.89, "AmazonReviewsClassification (fr)": 41.48, "AmazonReviewsClassification (ja)": 22.49, "AmazonReviewsClassification (zh)": 22.12, "Banking77Classification": 78.46, "EmotionClassification": 51.74, "ImdbClassification": 87.01, "MTOPDomainClassification (de)": 80.56, "MTOPDomainClassification (en)": 90.99, "MTOPDomainClassification (es)": 80.78, "MTOPDomainClassification (fr)": 79.6, "MTOPDomainClassification (hi)": 21.22, "MTOPDomainClassification (th)": 15.82, "MTOPIntentClassification (de)": 52.5, "MTOPIntentClassification (en)": 64.98, "MTOPIntentClassification (es)": 52.07, "MTOPIntentClassification (fr)": 47.73, "MTOPIntentClassification (hi)": 3.74, "MTOPIntentClassification (th)": 4.96, "MasakhaNEWSClassification (fra)": 80.43, "MassiveIntentClassification (af)": 38.41, "MassiveIntentClassification (am)": 2.49, "MassiveIntentClassification (ar)": 4.7, "MassiveIntentClassification (az)": 31.77, "MassiveIntentClassification (bn)": 2.77, "MassiveIntentClassification (cy)": 31.69, "MassiveIntentClassification (da)": 41.76, "MassiveIntentClassification (de)": 52.01, "MassiveIntentClassification (el)": 9.74, "MassiveIntentClassification (en)": 71.78, "MassiveIntentClassification (es)": 54.1, "MassiveIntentClassification (fa)": 3.86, "MassiveIntentClassification (fi)": 34.07, "MassiveIntentClassification (fr)": 57.01, "MassiveIntentClassification (he)": 2.14, "MassiveIntentClassification (hi)": 2.97, "MassiveIntentClassification (hu)": 32.01, 
"MassiveIntentClassification (hy)": 3.17, "MassiveIntentClassification (id)": 34.55, "MassiveIntentClassification (is)": 32.0, "MassiveIntentClassification (it)": 52.94, "MassiveIntentClassification (ja)": 2.9, "MassiveIntentClassification (jv)": 32.42, "MassiveIntentClassification (ka)": 2.71, "MassiveIntentClassification (km)": 5.5, "MassiveIntentClassification (kn)": 2.41, "MassiveIntentClassification (ko)": 2.57, "MassiveIntentClassification (lv)": 35.09, "MassiveIntentClassification (ml)": 2.95, "MassiveIntentClassification (mn)": 18.33, "MassiveIntentClassification (ms)": 29.69, "MassiveIntentClassification (my)": 3.99, "MassiveIntentClassification (nb)": 41.29, "MassiveIntentClassification (nl)": 44.95, "MassiveIntentClassification (pl)": 37.67, "MassiveIntentClassification (pt)": 51.96, "MassiveIntentClassification (ro)": 43.83, "MassiveIntentClassification (ru)": 17.32, "MassiveIntentClassification (sl)": 33.71, "MassiveIntentClassification (sq)": 37.62, "MassiveIntentClassification (sv)": 40.67, "MassiveIntentClassification (sw)": 31.9, "MassiveIntentClassification (ta)": 1.91, "MassiveIntentClassification (te)": 2.54, "MassiveIntentClassification (th)": 3.85, "MassiveIntentClassification (tl)": 36.83, "MassiveIntentClassification (tr)": 33.0, "MassiveIntentClassification (ur)": 2.62, "MassiveIntentClassification (vi)": 22.81, "MassiveIntentClassification (zh-CN)": 1.09, "MassiveIntentClassification (zh-TW)": 3.49, "MassiveScenarioClassification (af)": 50.28, "MassiveScenarioClassification (am)": 7.15, "MassiveScenarioClassification (ar)": 12.12, "MassiveScenarioClassification (az)": 39.68, "MassiveScenarioClassification (bn)": 8.06, "MassiveScenarioClassification (cy)": 38.01, "MassiveScenarioClassification (da)": 51.44, "MassiveScenarioClassification (de)": 62.71, "MassiveScenarioClassification (el)": 17.19, "MassiveScenarioClassification (en)": 73.16, "MassiveScenarioClassification (es)": 59.56, "MassiveScenarioClassification (fa)": 6.5, "MassiveScenarioClassification (fi)": 41.72, "MassiveScenarioClassification (fr)": 63.6, "MassiveScenarioClassification (he)": 7.93, "MassiveScenarioClassification (hi)": 7.85, "MassiveScenarioClassification (hu)": 41.37, "MassiveScenarioClassification (hy)": 9.42, "MassiveScenarioClassification (id)": 44.88, "MassiveScenarioClassification (is)": 40.86, "MassiveScenarioClassification (it)": 60.09, "MassiveScenarioClassification (ja)": 6.56, "MassiveScenarioClassification (jv)": 40.18, "MassiveScenarioClassification (ka)": 7.37, "MassiveScenarioClassification (km)": 9.56, "MassiveScenarioClassification (kn)": 8.4, "MassiveScenarioClassification (ko)": 5.96, "MassiveScenarioClassification (lv)": 41.44, "MassiveScenarioClassification (ml)": 7.47, "MassiveScenarioClassification (mn)": 25.36, "MassiveScenarioClassification (ms)": 39.69, "MassiveScenarioClassification (my)": 9.68, "MassiveScenarioClassification (nb)": 49.92, "MassiveScenarioClassification (nl)": 56.09, "MassiveScenarioClassification (pl)": 45.2, "MassiveScenarioClassification (pt)": 57.99, "MassiveScenarioClassification (ro)": 56.0, "MassiveScenarioClassification (ru)": 27.47, "MassiveScenarioClassification (sl)": 41.04, "MassiveScenarioClassification (sq)": 49.38, "MassiveScenarioClassification (sv)": 50.97, "MassiveScenarioClassification (sw)": 40.62, "MassiveScenarioClassification (ta)": 7.59, "MassiveScenarioClassification (te)": 7.07, "MassiveScenarioClassification (th)": 8.52, "MassiveScenarioClassification (tl)": 49.89, "MassiveScenarioClassification (tr)": 43.08, 
"MassiveScenarioClassification (ur)": 9.31, "MassiveScenarioClassification (vi)": 27.46, "MassiveScenarioClassification (zh-CN)": 4.7, "MassiveScenarioClassification (zh-TW)": 7.24, "ToxicConversationsClassification": 71.73, "TweetSentimentExtractionClassification": 62.33 } ] }, "Clustering": { "v_measure": [ { "Model": "sentence-t5-large", "AlloProfClusteringP2P": 61.82, "AlloProfClusteringS2S": 39.78, "ArxivClusteringP2P": 41.62, "ArxivClusteringS2S": 29.44, "BiorxivClusteringP2P": 35.99, "BiorxivClusteringS2S": 24.02, "BlurbsClusteringP2P": 35.33, "BlurbsClusteringS2S": 13.27, "HALClusteringS2S": 18.73, "MLSUMClusteringP2P": 42.07, "MLSUMClusteringS2S": 31.87, "MasakhaNEWSClusteringP2P (fra)": 58.6, "MasakhaNEWSClusteringS2S (fra)": 31.33, "MedrxivClusteringP2P": 32.4, "MedrxivClusteringS2S": 26.33, "RedditClustering": 54.53, "RedditClusteringP2P": 62.5, "StackExchangeClustering": 65.11, "StackExchangeClusteringP2P": 36.86, "TenKGnadClusteringP2P": 44.11, "TenKGnadClusteringS2S": 17.26, "TwentyNewsgroupsClustering": 49.33 } ] }, "PairClassification": { "ap": [ { "Model": "sentence-t5-large", "OpusparcusPC (fr)": 91.19, "PawsXPairClassification (fr)": 59.59, "SprintDuplicateQuestions": 89.01, "TwitterSemEval2015": 79.75, "TwitterURLCorpus": 86.14 } ] }, "Reranking": { "map": [ { "Model": "sentence-t5-large", "AlloprofReranking": 57.99, "AskUbuntuDupQuestions": 61.51, "MindSmallReranking": 30.27, "SciDocsRR": 74.88, "StackOverflowDupQuestions": 49.34, "SyntecReranking": 79.77 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "sentence-t5-large", "AlloprofRetrieval": 34.52, "ArguAna": 39.27, "BSARDRetrieval": 0.0, "CQADupstackRetrieval": 38.96, "ClimateFEVER": 11.36, "DBPedia": 31.55, "FEVER": 36.21, "FiQA2018": 43.55, "HotpotQA": 33.95, "MSMARCO": 23.96, "MintakaRetrieval (fr)": 23.92, "NFCorpus": 31.1, "NQ": 42.02, "QuoraRetrieval": 85.73, "SCIDOCS": 15.38, "SciFact": 49.91, "SyntecRetrieval": 71.05, "TRECCOVID": 46.11, "Touche2020": 21.63, "XPQARetrieval (fr)": 48.79 } ] }, "STS": { "spearman": [ { "Model": "sentence-t5-large", "BIOSSES": 78.93, "SICK-R": 80.34, "SICKFr": 72.83, "STS12": 79.11, "STS13": 87.33, "STS14": 83.17, "STS15": 88.28, "STS16": 84.36, "STS17 (ar-ar)": 10.75, "STS17 (en-ar)": -4.71, "STS17 (en-de)": 73.62, "STS17 (en-en)": 88.99, "STS17 (en-tr)": -0.42, "STS17 (es-en)": 62.62, "STS17 (es-es)": 82.74, "STS17 (fr-en)": 67.86, "STS17 (it-en)": 51.86, "STS17 (ko-ko)": 9.44, "STS17 (nl-en)": 45.95, "STS22 (ar)": 27.01, "STS22 (de)": 43.73, "STS22 (de-en)": 49.93, "STS22 (de-fr)": 61.58, "STS22 (de-pl)": 38.83, "STS22 (en)": 62.39, "STS22 (es)": 57.68, "STS22 (es-en)": 68.09, "STS22 (es-it)": 61.58, "STS22 (fr)": 75.01, "STS22 (fr-pl)": 5.63, "STS22 (it)": 62.01, "STS22 (pl)": 25.0, "STS22 (pl-en)": 51.72, "STS22 (ru)": 14.21, "STS22 (tr)": 47.3, "STS22 (zh)": 30.47, "STS22 (zh-en)": 23.1, "STSBenchmark": 85.36, "STSBenchmarkMultilingualSTS (fr)": 77.59 } ] }, "Summarization": { "spearman": [ { "Model": "sentence-t5-large", "SummEval": 29.64, "SummEvalFr": 30.23 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "sentence-t5-large" } ] } }, "rubert-base-cased": { "BitextMining": { "f1": [ { "Model": "rubert-base-cased", "Tatoeba (rus-Cyrl_eng-Latn)": 16.76 } ] }, "Classification": { "accuracy": [ { "Model": "rubert-base-cased", "GeoreviewClassification (rus-Cyrl)": 37.22, "HeadlineClassification (rus-Cyrl)": 75.23, "InappropriatenessClassification (rus-Cyrl)": 57.34, "KinopoiskClassification (rus-Cyrl)": 49.91, "MassiveIntentClassification (rus-Cyrl)": 53.02, 
"MassiveScenarioClassification (rus-Cyrl)": 56.79, "RuReviewsClassification (rus-Cyrl)": 50.74, "RuSciBenchGRNTIClassification (rus-Cyrl)": 48.03, "RuSciBenchOECDClassification (rus-Cyrl)": 36.13 } ] }, "Clustering": { "v_measure": [ { "Model": "rubert-base-cased", "GeoreviewClusteringP2P (rus-Cyrl)": 28.77, "MLSUMClusteringP2P (rus-Cyrl)": 41.42, "MLSUMClusteringS2S (rus-Cyrl)": 40.52, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 28.29, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 26.67 } ] }, "PairClassification": { "ap": [ { "Model": "rubert-base-cased", "OpusparcusPC (rus-Cyrl)": 81.65, "TERRa (rus-Cyrl)": 52.12 } ] }, "Reranking": { "map": [ { "Model": "rubert-base-cased", "RuBQReranking (rus-Cyrl)": 41.65 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "rubert-base-cased", "RiaNewsRetrieval (rus-Cyrl)": 5.58, "RuBQRetrieval (rus-Cyrl)": 9.52 } ] }, "STS": { "spearman": [ { "Model": "rubert-base-cased", "RUParaPhraserSTS (rus-Cyrl)": 49.72, "RuSTSBenchmarkSTS (rus-Cyrl)": 53.95, "STS22 (rus-Cyrl)": 34.98, "STSBenchmarkMultilingualSTS (rus-Cyrl)": 53.76 } ] }, "Summarization": { "spearman": [ { "Model": "rubert-base-cased" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "rubert-base-cased" } ] } }, "bge-base-en-v1.5": { "BitextMining": { "f1": [ { "Model": "bge-base-en-v1.5" } ] }, "Classification": { "accuracy": [ { "Model": "bge-base-en-v1.5" } ] }, "Clustering": { "v_measure": [ { "Model": "bge-base-en-v1.5", "BiorxivClusteringP2P": 39.44, "BiorxivClusteringS2S": 36.62, "MedrxivClusteringP2P": 33.21, "MedrxivClusteringS2S": 31.68, "RedditClustering": 56.61, "RedditClusteringP2P": 62.66, "StackExchangeClustering": 66.11, "StackExchangeClusteringP2P": 35.24, "TwentyNewsgroupsClustering": 50.75 } ] }, "PairClassification": { "ap": [ { "Model": "bge-base-en-v1.5" } ] }, "Reranking": { "map": [ { "Model": "bge-base-en-v1.5" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "bge-base-en-v1.5", "ARCChallenge": 9.66, "AlphaNLI": 10.99, "HellaSwag": 26.64, "PIQA": 25.69, "Quail": 1.42, "RARbCode": 46.47, "RARbMath": 46.86, "SIQA": 0.94, "SpartQA": 3.37, "TempReasonL1": 1.07, "TempReasonL2Fact": 17.23, "TempReasonL2Pure": 1.29, "TempReasonL3Fact": 13.36, "TempReasonL3Pure": 5.2, "WinoGrande": 13.76 } ] }, "STS": { "spearman": [ { "Model": "bge-base-en-v1.5" } ] }, "Summarization": { "spearman": [ { "Model": "bge-base-en-v1.5" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "bge-base-en-v1.5" } ] } }, "text-search-ada-doc-001": { "BitextMining": { "f1": [ { "Model": "text-search-ada-doc-001" } ] }, "Classification": { "accuracy": [ { "Model": "text-search-ada-doc-001" } ] }, "Clustering": { "v_measure": [ { "Model": "text-search-ada-doc-001", "TwentyNewsgroupsClustering": 32.92 } ] }, "PairClassification": { "ap": [ { "Model": "text-search-ada-doc-001" } ] }, "Reranking": { "map": [ { "Model": "text-search-ada-doc-001" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "text-search-ada-doc-001" } ] }, "STS": { "spearman": [ { "Model": "text-search-ada-doc-001" } ] }, "Summarization": { "spearman": [ { "Model": "text-search-ada-doc-001" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "text-search-ada-doc-001" } ] } }, "flaubert_base_uncased": { "BitextMining": { "f1": [ { "Model": "flaubert_base_uncased" } ] }, "Classification": { "accuracy": [ { "Model": "flaubert_base_uncased", "AmazonReviewsClassification (fr)": 23.52, "MTOPDomainClassification (fr)": 27.74, "MTOPIntentClassification (fr)": 8.61, "MasakhaNEWSClassification (fra)": 62.61, "MassiveIntentClassification (fr)": 
6.24, "MassiveScenarioClassification (fr)": 10.98 } ] }, "Clustering": { "v_measure": [ { "Model": "flaubert_base_uncased", "AlloProfClusteringP2P": 43.2, "AlloProfClusteringS2S": 12.94, "HALClusteringS2S": 1.8, "MLSUMClusteringP2P": 33.22, "MLSUMClusteringS2S": 14.9, "MasakhaNEWSClusteringP2P (fra)": 28.49, "MasakhaNEWSClusteringS2S (fra)": 22.58 } ] }, "PairClassification": { "ap": [ { "Model": "flaubert_base_uncased", "OpusparcusPC (fr)": 82.0, "PawsXPairClassification (fr)": 52.78 } ] }, "Reranking": { "map": [ { "Model": "flaubert_base_uncased", "AlloprofReranking": 34.55, "SyntecReranking": 57.18 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "flaubert_base_uncased", "AlloprofRetrieval": 1.72, "BSARDRetrieval": 0.0, "MintakaRetrieval (fr)": 0.51, "SyntecRetrieval": 22.33, "XPQARetrieval (fr)": 9.09 } ] }, "STS": { "spearman": [ { "Model": "flaubert_base_uncased", "SICKFr": 41.9, "STS22 (fr)": 55.15, "STSBenchmarkMultilingualSTS (fr)": 33.41 } ] }, "Summarization": { "spearman": [ { "Model": "flaubert_base_uncased", "SummEvalFr": 29.43 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "flaubert_base_uncased" } ] } }, "cross-en-de-roberta-sentence-transformer": { "BitextMining": { "f1": [ { "Model": "cross-en-de-roberta-sentence-transformer" } ] }, "Classification": { "accuracy": [ { "Model": "cross-en-de-roberta-sentence-transformer" } ] }, "Clustering": { "v_measure": [ { "Model": "cross-en-de-roberta-sentence-transformer", "BlurbsClusteringP2P": 30.82, "BlurbsClusteringS2S": 12.69, "TenKGnadClusteringP2P": 23.5, "TenKGnadClusteringS2S": 10.94 } ] }, "PairClassification": { "ap": [ { "Model": "cross-en-de-roberta-sentence-transformer" } ] }, "Reranking": { "map": [ { "Model": "cross-en-de-roberta-sentence-transformer" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "cross-en-de-roberta-sentence-transformer" } ] }, "STS": { "spearman": [ { "Model": "cross-en-de-roberta-sentence-transformer" } ] }, "Summarization": { "spearman": [ { "Model": "cross-en-de-roberta-sentence-transformer" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "cross-en-de-roberta-sentence-transformer" } ] } }, "bert-base-10lang-cased": { "BitextMining": { "f1": [ { "Model": "bert-base-10lang-cased" } ] }, "Classification": { "accuracy": [ { "Model": "bert-base-10lang-cased", "AmazonReviewsClassification (fr)": 29.38, "MTOPDomainClassification (fr)": 63.65, "MTOPIntentClassification (fr)": 37.87, "MasakhaNEWSClassification (fra)": 63.93, "MassiveIntentClassification (fr)": 37.28, "MassiveScenarioClassification (fr)": 44.5 } ] }, "Clustering": { "v_measure": [ { "Model": "bert-base-10lang-cased", "AlloProfClusteringP2P": 53.22, "AlloProfClusteringS2S": 42.92, "HALClusteringS2S": 19.94, "MLSUMClusteringP2P": 40.96, "MLSUMClusteringS2S": 31.87, "MasakhaNEWSClusteringP2P (fra)": 24.23, "MasakhaNEWSClusteringS2S (fra)": 24.46 } ] }, "PairClassification": { "ap": [ { "Model": "bert-base-10lang-cased", "OpusparcusPC (fr)": 86.79, "PawsXPairClassification (fr)": 53.4 } ] }, "Reranking": { "map": [ { "Model": "bert-base-10lang-cased", "AlloprofReranking": 36.21, "SyntecReranking": 53.25 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "bert-base-10lang-cased", "AlloprofRetrieval": 1.6, "BSARDRetrieval": 0.0, "MintakaRetrieval (fr)": 3.55, "SyntecRetrieval": 18.95, "XPQARetrieval (fr)": 18.39 } ] }, "STS": { "spearman": [ { "Model": "bert-base-10lang-cased", "SICKFr": 58.76, "STS22 (fr)": 40.31, "STSBenchmarkMultilingualSTS (fr)": 52.25 } ] }, "Summarization": { "spearman": [ { "Model": 
"bert-base-10lang-cased", "SummEvalFr": 29.06 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "bert-base-10lang-cased" } ] } }, "flan-t5-large": { "BitextMining": { "f1": [ { "Model": "flan-t5-large" } ] }, "Classification": { "accuracy": [ { "Model": "flan-t5-large" } ] }, "Clustering": { "v_measure": [ { "Model": "flan-t5-large" } ] }, "PairClassification": { "ap": [ { "Model": "flan-t5-large" } ] }, "Reranking": { "map": [ { "Model": "flan-t5-large" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "flan-t5-large" } ] }, "STS": { "spearman": [ { "Model": "flan-t5-large" } ] }, "Summarization": { "spearman": [ { "Model": "flan-t5-large" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "flan-t5-large", "Core17InstructionRetrieval": 1.32, "News21InstructionRetrieval": 8.95, "Robust04InstructionRetrieval": 3.9 } ] } }, "llama-2-7b-chat": { "BitextMining": { "f1": [ { "Model": "llama-2-7b-chat" } ] }, "Classification": { "accuracy": [ { "Model": "llama-2-7b-chat" } ] }, "Clustering": { "v_measure": [ { "Model": "llama-2-7b-chat" } ] }, "PairClassification": { "ap": [ { "Model": "llama-2-7b-chat" } ] }, "Reranking": { "map": [ { "Model": "llama-2-7b-chat" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "llama-2-7b-chat" } ] }, "STS": { "spearman": [ { "Model": "llama-2-7b-chat" } ] }, "Summarization": { "spearman": [ { "Model": "llama-2-7b-chat" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "llama-2-7b-chat", "Core17InstructionRetrieval": 2.84, "News21InstructionRetrieval": 0.23, "Robust04InstructionRetrieval": 2.0 } ] } }, "bge-small-en-v1.5": { "BitextMining": { "f1": [ { "Model": "bge-small-en-v1.5" } ] }, "Classification": { "accuracy": [ { "Model": "bge-small-en-v1.5" } ] }, "Clustering": { "v_measure": [ { "Model": "bge-small-en-v1.5" } ] }, "PairClassification": { "ap": [ { "Model": "bge-small-en-v1.5" } ] }, "Reranking": { "map": [ { "Model": "bge-small-en-v1.5" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "bge-small-en-v1.5", "ARCChallenge": 8.95, "AlphaNLI": 11.64, "HellaSwag": 25.44, "PIQA": 23.92, "Quail": 1.75, "RARbCode": 42.36, "RARbMath": 44.98, "SIQA": 0.77, "SpartQA": 3.55, "TempReasonL1": 1.41, "TempReasonL2Fact": 17.56, "TempReasonL2Pure": 1.05, "TempReasonL3Fact": 13.88, "TempReasonL3Pure": 4.76, "WinoGrande": 10.28 } ] }, "STS": { "spearman": [ { "Model": "bge-small-en-v1.5" } ] }, "Summarization": { "spearman": [ { "Model": "bge-small-en-v1.5" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "bge-small-en-v1.5" } ] } }, "tart-dual-contriever-msmarco": { "BitextMining": { "f1": [ { "Model": "tart-dual-contriever-msmarco" } ] }, "Classification": { "accuracy": [ { "Model": "tart-dual-contriever-msmarco" } ] }, "Clustering": { "v_measure": [ { "Model": "tart-dual-contriever-msmarco" } ] }, "PairClassification": { "ap": [ { "Model": "tart-dual-contriever-msmarco" } ] }, "Reranking": { "map": [ { "Model": "tart-dual-contriever-msmarco" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "tart-dual-contriever-msmarco" } ] }, "STS": { "spearman": [ { "Model": "tart-dual-contriever-msmarco" } ] }, "Summarization": { "spearman": [ { "Model": "tart-dual-contriever-msmarco" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "tart-dual-contriever-msmarco", "Core17InstructionRetrieval": -3.04, "News21InstructionRetrieval": -2.98, "Robust04InstructionRetrieval": -8.98 } ] } }, "bert-base-15lang-cased": { "BitextMining": { "f1": [ { "Model": "bert-base-15lang-cased" } ] }, "Classification": { "accuracy": [ { "Model": "bert-base-15lang-cased", 
"AmazonReviewsClassification (fr)": 29.35, "MTOPDomainClassification (fr)": 63.7, "MTOPIntentClassification (fr)": 37.85, "MasakhaNEWSClassification (fra)": 63.89, "MassiveIntentClassification (fr)": 37.28, "MassiveScenarioClassification (fr)": 44.47 } ] }, "Clustering": { "v_measure": [ { "Model": "bert-base-15lang-cased", "AlloProfClusteringP2P": 53.16, "AlloProfClusteringS2S": 43.43, "HALClusteringS2S": 20.26, "MLSUMClusteringP2P": 41.22, "MLSUMClusteringS2S": 31.88, "MasakhaNEWSClusteringP2P (fra)": 24.23, "MasakhaNEWSClusteringS2S (fra)": 24.46 } ] }, "PairClassification": { "ap": [ { "Model": "bert-base-15lang-cased", "OpusparcusPC (fr)": 86.78, "PawsXPairClassification (fr)": 53.38 } ] }, "Reranking": { "map": [ { "Model": "bert-base-15lang-cased", "AlloprofReranking": 36.21, "SyntecReranking": 53.25 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "bert-base-15lang-cased", "AlloprofRetrieval": 1.61, "BSARDRetrieval": 0.0, "MintakaRetrieval (fr)": 3.55, "SyntecRetrieval": 18.95, "XPQARetrieval (fr)": 18.35 } ] }, "STS": { "spearman": [ { "Model": "bert-base-15lang-cased", "SICKFr": 58.77, "STS22 (fr)": 40.4, "STSBenchmarkMultilingualSTS (fr)": 52.25 } ] }, "Summarization": { "spearman": [ { "Model": "bert-base-15lang-cased", "SummEvalFr": 29.13 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "bert-base-15lang-cased" } ] } }, "USER-base": { "BitextMining": { "f1": [ { "Model": "USER-base", "Tatoeba (rus-Cyrl_eng-Latn)": 90.2 } ] }, "Classification": { "accuracy": [ { "Model": "USER-base", "GeoreviewClassification (rus-Cyrl)": 47.23, "HeadlineClassification (rus-Cyrl)": 74.88, "InappropriatenessClassification (rus-Cyrl)": 61.94, "KinopoiskClassification (rus-Cyrl)": 55.69, "MassiveIntentClassification (rus-Cyrl)": 65.57, "MassiveScenarioClassification (rus-Cyrl)": 68.33, "RuReviewsClassification (rus-Cyrl)": 66.44, "RuSciBenchGRNTIClassification (rus-Cyrl)": 55.55, "RuSciBenchOECDClassification (rus-Cyrl)": 43.28 } ] }, "Clustering": { "v_measure": [ { "Model": "USER-base", "GeoreviewClusteringP2P (rus-Cyrl)": 64.16, "MLSUMClusteringP2P (rus-Cyrl)": 48.09, "MLSUMClusteringS2S (rus-Cyrl)": 45.73, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 51.38, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 44.73 } ] }, "PairClassification": { "ap": [ { "Model": "USER-base", "OpusparcusPC (rus-Cyrl)": 91.65, "TERRa (rus-Cyrl)": 60.02 } ] }, "Reranking": { "map": [ { "Model": "USER-base", "RuBQReranking (rus-Cyrl)": 64.42 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "USER-base", "RiaNewsRetrieval (rus-Cyrl)": 77.83, "RuBQRetrieval (rus-Cyrl)": 56.86 } ] }, "STS": { "spearman": [ { "Model": "USER-base", "RUParaPhraserSTS (rus-Cyrl)": 73.56, "RuSTSBenchmarkSTS (rus-Cyrl)": 82.26, "STS22 (rus-Cyrl)": 63.39, "STSBenchmarkMultilingualSTS (rus-Cyrl)": 81.81 } ] }, "Summarization": { "spearman": [ { "Model": "USER-base" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "USER-base" } ] } }, "sentence-t5-base": { "BitextMining": { "f1": [ { "Model": "sentence-t5-base" } ] }, "Classification": { "accuracy": [ { "Model": "sentence-t5-base", "AmazonCounterfactualClassification (de)": 69.98, "AmazonCounterfactualClassification (en)": 75.82, "AmazonCounterfactualClassification (en-ext)": 76.81, "AmazonCounterfactualClassification (ja)": 46.05, "AmazonPolarityClassification": 85.12, "AmazonReviewsClassification (de)": 37.9, "AmazonReviewsClassification (en)": 44.94, "AmazonReviewsClassification (es)": 37.33, "AmazonReviewsClassification (fr)": 37.35, "AmazonReviewsClassification (ja)": 22.29, 
"AmazonReviewsClassification (zh)": 21.53, "Banking77Classification": 76.48, "EmotionClassification": 51.35, "ImdbClassification": 77.34, "MTOPDomainClassification (de)": 76.98, "MTOPDomainClassification (en)": 90.34, "MTOPDomainClassification (es)": 73.61, "MTOPDomainClassification (fr)": 75.03, "MTOPDomainClassification (hi)": 21.4, "MTOPDomainClassification (th)": 16.21, "MTOPIntentClassification (de)": 44.43, "MTOPIntentClassification (en)": 63.32, "MTOPIntentClassification (es)": 42.03, "MTOPIntentClassification (fr)": 43.85, "MTOPIntentClassification (hi)": 3.8, "MTOPIntentClassification (th)": 5.21, "MasakhaNEWSClassification (fra)": 81.21, "MassiveIntentClassification (af)": 34.32, "MassiveIntentClassification (am)": 2.38, "MassiveIntentClassification (ar)": 4.53, "MassiveIntentClassification (az)": 31.76, "MassiveIntentClassification (bn)": 2.58, "MassiveIntentClassification (cy)": 28.94, "MassiveIntentClassification (da)": 38.82, "MassiveIntentClassification (de)": 45.23, "MassiveIntentClassification (el)": 10.05, "MassiveIntentClassification (en)": 69.74, "MassiveIntentClassification (es)": 45.32, "MassiveIntentClassification (fa)": 3.58, "MassiveIntentClassification (fi)": 33.52, "MassiveIntentClassification (fr)": 51.13, "MassiveIntentClassification (he)": 2.63, "MassiveIntentClassification (hi)": 2.68, "MassiveIntentClassification (hu)": 32.31, "MassiveIntentClassification (hy)": 3.33, "MassiveIntentClassification (id)": 35.5, "MassiveIntentClassification (is)": 29.82, "MassiveIntentClassification (it)": 45.59, "MassiveIntentClassification (ja)": 3.67, "MassiveIntentClassification (jv)": 31.15, "MassiveIntentClassification (ka)": 2.77, "MassiveIntentClassification (km)": 5.66, "MassiveIntentClassification (kn)": 2.59, "MassiveIntentClassification (ko)": 2.34, "MassiveIntentClassification (lv)": 33.97, "MassiveIntentClassification (ml)": 2.55, "MassiveIntentClassification (mn)": 14.7, "MassiveIntentClassification (ms)": 33.12, "MassiveIntentClassification (my)": 4.42, "MassiveIntentClassification (nb)": 38.53, "MassiveIntentClassification (nl)": 37.96, "MassiveIntentClassification (pl)": 34.41, "MassiveIntentClassification (pt)": 43.35, "MassiveIntentClassification (ro)": 42.69, "MassiveIntentClassification (ru)": 14.82, "MassiveIntentClassification (sl)": 34.54, "MassiveIntentClassification (sq)": 38.54, "MassiveIntentClassification (sv)": 35.98, "MassiveIntentClassification (sw)": 32.14, "MassiveIntentClassification (ta)": 1.41, "MassiveIntentClassification (te)": 2.5, "MassiveIntentClassification (th)": 3.71, "MassiveIntentClassification (tl)": 36.04, "MassiveIntentClassification (tr)": 33.77, "MassiveIntentClassification (ur)": 2.99, "MassiveIntentClassification (vi)": 22.62, "MassiveIntentClassification (zh-CN)": 1.12, "MassiveIntentClassification (zh-TW)": 4.63, "MassiveScenarioClassification (af)": 44.45, "MassiveScenarioClassification (am)": 7.51, "MassiveScenarioClassification (ar)": 12.32, "MassiveScenarioClassification (az)": 38.41, "MassiveScenarioClassification (bn)": 8.45, "MassiveScenarioClassification (cy)": 35.04, "MassiveScenarioClassification (da)": 48.36, "MassiveScenarioClassification (de)": 59.12, "MassiveScenarioClassification (el)": 17.68, "MassiveScenarioClassification (en)": 72.32, "MassiveScenarioClassification (es)": 55.61, "MassiveScenarioClassification (fa)": 6.86, "MassiveScenarioClassification (fi)": 41.34, "MassiveScenarioClassification (fr)": 59.92, "MassiveScenarioClassification (he)": 7.86, "MassiveScenarioClassification (hi)": 7.63, 
"MassiveScenarioClassification (hu)": 41.31, "MassiveScenarioClassification (hy)": 9.23, "MassiveScenarioClassification (id)": 44.64, "MassiveScenarioClassification (is)": 39.63, "MassiveScenarioClassification (it)": 54.58, "MassiveScenarioClassification (ja)": 4.96, "MassiveScenarioClassification (jv)": 40.73, "MassiveScenarioClassification (ka)": 7.51, "MassiveScenarioClassification (km)": 8.73, "MassiveScenarioClassification (kn)": 7.99, "MassiveScenarioClassification (ko)": 6.03, "MassiveScenarioClassification (lv)": 36.42, "MassiveScenarioClassification (ml)": 6.96, "MassiveScenarioClassification (mn)": 19.85, "MassiveScenarioClassification (ms)": 43.18, "MassiveScenarioClassification (my)": 9.46, "MassiveScenarioClassification (nb)": 46.6, "MassiveScenarioClassification (nl)": 50.0, "MassiveScenarioClassification (pl)": 42.3, "MassiveScenarioClassification (pt)": 52.24, "MassiveScenarioClassification (ro)": 53.7, "MassiveScenarioClassification (ru)": 20.69, "MassiveScenarioClassification (sl)": 39.79, "MassiveScenarioClassification (sq)": 50.16, "MassiveScenarioClassification (sv)": 46.69, "MassiveScenarioClassification (sw)": 40.48, "MassiveScenarioClassification (ta)": 7.47, "MassiveScenarioClassification (te)": 6.87, "MassiveScenarioClassification (th)": 8.26, "MassiveScenarioClassification (tl)": 48.94, "MassiveScenarioClassification (tr)": 41.83, "MassiveScenarioClassification (ur)": 9.77, "MassiveScenarioClassification (vi)": 30.01, "MassiveScenarioClassification (zh-CN)": 4.17, "MassiveScenarioClassification (zh-TW)": 7.91, "ToxicConversationsClassification": 68.2, "TweetSentimentExtractionClassification": 62.71 } ] }, "Clustering": { "v_measure": [ { "Model": "sentence-t5-base", "AlloProfClusteringP2P": 58.44, "AlloProfClusteringS2S": 35.93, "ArxivClusteringP2P": 39.28, "ArxivClusteringS2S": 27.26, "BiorxivClusteringP2P": 33.99, "BiorxivClusteringS2S": 22.92, "BlurbsClusteringP2P": 30.59, "BlurbsClusteringS2S": 11.57, "HALClusteringS2S": 17.72, "MLSUMClusteringP2P": 40.77, "MLSUMClusteringS2S": 30.06, "MasakhaNEWSClusteringP2P (fra)": 61.9, "MasakhaNEWSClusteringS2S (fra)": 35.64, "MedrxivClusteringP2P": 33.2, "MedrxivClusteringS2S": 26.13, "RedditClustering": 52.93, "RedditClusteringP2P": 59.67, "StackExchangeClustering": 63.13, "StackExchangeClusteringP2P": 35.68, "TenKGnadClusteringP2P": 44.88, "TenKGnadClusteringS2S": 18.11, "TwentyNewsgroupsClustering": 48.1 } ] }, "PairClassification": { "ap": [ { "Model": "sentence-t5-base", "OpusparcusPC (fr)": 89.4, "PawsXPairClassification (fr)": 55.35, "SprintDuplicateQuestions": 91.23, "TwitterSemEval2015": 78.25, "TwitterURLCorpus": 86.05 } ] }, "Reranking": { "map": [ { "Model": "sentence-t5-base", "AlloprofReranking": 50.12, "AskUbuntuDupQuestions": 59.73, "MindSmallReranking": 30.2, "SciDocsRR": 73.96, "StackOverflowDupQuestions": 48.46, "SyntecReranking": 78.05 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "sentence-t5-base", "AlloprofRetrieval": 27.52, "ArguAna": 44.85, "BSARDRetrieval": 0.16, "CQADupstackRetrieval": 35.23, "ClimateFEVER": 10.37, "DBPedia": 27.77, "FEVER": 26.17, "FiQA2018": 34.83, "HotpotQA": 33.2, "MSMARCO": 20.7, "MintakaRetrieval (fr)": 21.04, "NFCorpus": 28.65, "NQ": 36.32, "QuoraRetrieval": 85.49, "SCIDOCS": 14.15, "SciFact": 45.76, "SyntecRetrieval": 67.0, "TRECCOVID": 40.7, "Touche2020": 20.3, "XPQARetrieval (fr)": 45.19 } ] }, "STS": { "spearman": [ { "Model": "sentence-t5-base", "BIOSSES": 75.89, "SICK-R": 80.18, "SICKFr": 71.74, "STS12": 78.05, "STS13": 85.85, "STS14": 82.19, "STS15": 87.46, 
"STS16": 84.03, "STS17 (ar-ar)": 13.36, "STS17 (en-ar)": -5.65, "STS17 (en-de)": 67.11, "STS17 (en-en)": 89.57, "STS17 (en-tr)": -0.02, "STS17 (es-en)": 47.72, "STS17 (es-es)": 79.94, "STS17 (fr-en)": 56.61, "STS17 (it-en)": 30.46, "STS17 (ko-ko)": 10.06, "STS17 (nl-en)": 36.46, "STS22 (ar)": 31.2, "STS22 (de)": 42.08, "STS22 (de-en)": 46.9, "STS22 (de-fr)": 55.04, "STS22 (de-pl)": 33.94, "STS22 (en)": 62.66, "STS22 (es)": 53.81, "STS22 (es-en)": 65.19, "STS22 (es-it)": 55.29, "STS22 (fr)": 77.69, "STS22 (fr-pl)": 28.17, "STS22 (it)": 60.65, "STS22 (pl)": 24.42, "STS22 (pl-en)": 42.97, "STS22 (ru)": 12.13, "STS22 (tr)": 40.45, "STS22 (zh)": 32.9, "STS22 (zh-en)": 20.15, "STSBenchmark": 85.52, "STSBenchmarkMultilingualSTS (fr)": 74.04 } ] }, "Summarization": { "spearman": [ { "Model": "sentence-t5-base", "SummEval": 31.39, "SummEvalFr": 30.01 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "sentence-t5-base" } ] } }, "st-polish-paraphrase-from-distilroberta": { "BitextMining": { "f1": [ { "Model": "st-polish-paraphrase-from-distilroberta" } ] }, "Classification": { "accuracy": [ { "Model": "st-polish-paraphrase-from-distilroberta", "AllegroReviews": 34.5, "CBD": 70.27, "MassiveIntentClassification (pl)": 64.81, "MassiveScenarioClassification (pl)": 70.01, "PAC": 64.6, "PolEmo2.0-IN": 67.06, "PolEmo2.0-OUT": 38.58 } ] }, "Clustering": { "v_measure": [ { "Model": "st-polish-paraphrase-from-distilroberta", "8TagsClustering": 31.68 } ] }, "PairClassification": { "ap": [ { "Model": "st-polish-paraphrase-from-distilroberta", "CDSC-E": 75.99, "PPC": 93.29, "PSC": 99.1, "SICK-E-PL": 79.63 } ] }, "Reranking": { "map": [ { "Model": "st-polish-paraphrase-from-distilroberta" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "st-polish-paraphrase-from-distilroberta", "ArguAna-PL": 49.42, "DBPedia-PL": 19.82, "FiQA-PL": 19.58, "HotpotQA-PL": 23.47, "MSMARCO-PL": 16.51, "NFCorpus-PL": 22.49, "NQ-PL": 19.83, "Quora-PL": 81.17, "SCIDOCS-PL": 12.15, "SciFact-PL": 49.49, "TRECCOVID-PL": 38.97 } ] }, "STS": { "spearman": [ { "Model": "st-polish-paraphrase-from-distilroberta", "CDSC-R": 89.62, "SICK-R-PL": 76.37, "STS22 (pl)": 40.36 } ] }, "Summarization": { "spearman": [ { "Model": "st-polish-paraphrase-from-distilroberta" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "st-polish-paraphrase-from-distilroberta" } ] } }, "jina-embeddings-v2-base-en": { "BitextMining": { "f1": [ { "Model": "jina-embeddings-v2-base-en" } ] }, "Classification": { "accuracy": [ { "Model": "jina-embeddings-v2-base-en" } ] }, "Clustering": { "v_measure": [ { "Model": "jina-embeddings-v2-base-en" } ] }, "PairClassification": { "ap": [ { "Model": "jina-embeddings-v2-base-en" } ] }, "Reranking": { "map": [ { "Model": "jina-embeddings-v2-base-en" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "jina-embeddings-v2-base-en", "LEMBNarrativeQARetrieval": 37.89, "LEMBNeedleRetrieval": 54.25, "LEMBPasskeyRetrieval": 50.25, "LEMBQMSumRetrieval": 38.87, "LEMBSummScreenFDRetrieval": 93.48, "LEMBWikimQARetrieval": 73.99 } ] }, "STS": { "spearman": [ { "Model": "jina-embeddings-v2-base-en" } ] }, "Summarization": { "spearman": [ { "Model": "jina-embeddings-v2-base-en" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "jina-embeddings-v2-base-en" } ] } }, "text2vec-large-chinese": { "BitextMining": { "f1": [ { "Model": "text2vec-large-chinese" } ] }, "Classification": { "accuracy": [ { "Model": "text2vec-large-chinese", "AmazonReviewsClassification (zh)": 33.77, "IFlyTek": 41.54, "JDReview": 81.56, "MassiveIntentClassification 
(zh-CN)": 63.23, "MassiveScenarioClassification (zh-CN)": 68.45, "MultilingualSentiment": 58.97, "OnlineShopping": 83.51, "TNews": 38.92, "Waimai": 76.01 } ] }, "Clustering": { "v_measure": [ { "Model": "text2vec-large-chinese", "CLSClusteringP2P": 30.13, "CLSClusteringS2S": 28.77, "ThuNewsClusteringP2P": 35.05, "ThuNewsClusteringS2S": 26.14 } ] }, "PairClassification": { "ap": [ { "Model": "text2vec-large-chinese", "Cmnli": 77.67, "Ocnli": 64.04 } ] }, "Reranking": { "map": [ { "Model": "text2vec-large-chinese", "CMedQAv1": 58.92, "CMedQAv2": 60.41, "MMarcoReranking": 12.48, "T2Reranking": 64.82 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "text2vec-large-chinese", "CmedqaRetrieval": 15.53, "CovidRetrieval": 60.48, "DuRetrieval": 51.87, "EcomRetrieval": 37.58, "MMarcoRetrieval": 45.96, "MedicalRetrieval": 30.93, "T2Retrieval": 50.52, "VideoRetrieval": 42.65 } ] }, "STS": { "spearman": [ { "Model": "text2vec-large-chinese", "AFQMC": 24.51, "ATEC": 32.45, "BQ": 44.22, "LCQMC": 69.16, "PAWSX": 14.55, "QBQTC": 29.51, "STS22 (zh)": 65.94, "STSB": 79.45 } ] }, "Summarization": { "spearman": [ { "Model": "text2vec-large-chinese" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "text2vec-large-chinese" } ] } }, "herbert-base-retrieval-v2": { "BitextMining": { "f1": [ { "Model": "herbert-base-retrieval-v2" } ] }, "Classification": { "accuracy": [ { "Model": "herbert-base-retrieval-v2", "AllegroReviews": 34.11, "CBD": 68.35, "MassiveIntentClassification (pl)": 65.53, "MassiveScenarioClassification (pl)": 68.51, "PAC": 68.4, "PolEmo2.0-IN": 64.18, "PolEmo2.0-OUT": 45.73 } ] }, "Clustering": { "v_measure": [ { "Model": "herbert-base-retrieval-v2", "8TagsClustering": 28.15 } ] }, "PairClassification": { "ap": [ { "Model": "herbert-base-retrieval-v2", "CDSC-E": 63.31, "PPC": 84.18, "PSC": 98.87, "SICK-E-PL": 54.93 } ] }, "Reranking": { "map": [ { "Model": "herbert-base-retrieval-v2" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "herbert-base-retrieval-v2", "ArguAna-PL": 41.97, "DBPedia-PL": 24.07, "FiQA-PL": 24.25, "HotpotQA-PL": 43.41, "MSMARCO-PL": 51.56, "NFCorpus-PL": 25.95, "NQ-PL": 35.09, "Quora-PL": 78.86, "SCIDOCS-PL": 11.0, "SciFact-PL": 51.92, "TRECCOVID-PL": 42.64 } ] }, "STS": { "spearman": [ { "Model": "herbert-base-retrieval-v2", "CDSC-R": 86.18, "SICK-R-PL": 64.67, "STS22 (pl)": 39.73 } ] }, "Summarization": { "spearman": [ { "Model": "herbert-base-retrieval-v2" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "herbert-base-retrieval-v2" } ] } }, "e5-base-v2": { "BitextMining": { "f1": [ { "Model": "e5-base-v2" } ] }, "Classification": { "accuracy": [ { "Model": "e5-base-v2" } ] }, "Clustering": { "v_measure": [ { "Model": "e5-base-v2", "BiorxivClusteringP2P": 37.12, "BiorxivClusteringS2S": 33.41, "MedrxivClusteringP2P": 31.82, "MedrxivClusteringS2S": 29.68, "RedditClustering": 56.54, "RedditClusteringP2P": 63.23, "StackExchangeClustering": 64.6, "StackExchangeClusteringP2P": 33.02, "TwentyNewsgroupsClustering": 49.86 } ] }, "PairClassification": { "ap": [ { "Model": "e5-base-v2" } ] }, "Reranking": { "map": [ { "Model": "e5-base-v2" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "e5-base-v2" } ] }, "STS": { "spearman": [ { "Model": "e5-base-v2" } ] }, "Summarization": { "spearman": [ { "Model": "e5-base-v2" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "e5-base-v2", "Core17InstructionRetrieval": -2.9, "News21InstructionRetrieval": -2.0, "Robust04InstructionRetrieval": -6.73 } ] } }, "LASER2": { "BitextMining": { "f1": [ { "Model": "LASER2", "BUCC (de-en)": 
99.21, "BUCC (fr-en)": 98.39, "BUCC (ru-en)": 97.62, "BUCC (zh-en)": 97.7, "Tatoeba (afr-eng)": 92.59, "Tatoeba (amh-eng)": 80.82, "Tatoeba (ang-eng)": 25.22, "Tatoeba (ara-eng)": 90.14, "Tatoeba (arq-eng)": 26.63, "Tatoeba (arz-eng)": 66.16, "Tatoeba (ast-eng)": 76.35, "Tatoeba (awa-eng)": 33.74, "Tatoeba (aze-eng)": 82.41, "Tatoeba (bel-eng)": 79.54, "Tatoeba (ben-eng)": 89.43, "Tatoeba (ber-eng)": 77.63, "Tatoeba (bos-eng)": 95.86, "Tatoeba (bre-eng)": 31.2, "Tatoeba (bul-eng)": 93.57, "Tatoeba (cat-eng)": 95.8, "Tatoeba (cbk-eng)": 77.17, "Tatoeba (ceb-eng)": 9.93, "Tatoeba (ces-eng)": 95.52, "Tatoeba (cha-eng)": 14.86, "Tatoeba (cmn-eng)": 85.62, "Tatoeba (cor-eng)": 4.45, "Tatoeba (csb-eng)": 27.03, "Tatoeba (cym-eng)": 5.85, "Tatoeba (dan-eng)": 95.22, "Tatoeba (deu-eng)": 99.07, "Tatoeba (dsb-eng)": 42.34, "Tatoeba (dtp-eng)": 7.39, "Tatoeba (ell-eng)": 96.2, "Tatoeba (epo-eng)": 96.61, "Tatoeba (est-eng)": 96.43, "Tatoeba (eus-eng)": 93.32, "Tatoeba (fao-eng)": 57.04, "Tatoeba (fin-eng)": 96.98, "Tatoeba (fra-eng)": 94.28, "Tatoeba (fry-eng)": 42.07, "Tatoeba (gla-eng)": 1.52, "Tatoeba (gle-eng)": 4.2, "Tatoeba (glg-eng)": 96.14, "Tatoeba (gsw-eng)": 27.52, "Tatoeba (heb-eng)": 0.0, "Tatoeba (hin-eng)": 95.32, "Tatoeba (hrv-eng)": 96.72, "Tatoeba (hsb-eng)": 45.75, "Tatoeba (hun-eng)": 95.2, "Tatoeba (hye-eng)": 88.72, "Tatoeba (ido-eng)": 80.86, "Tatoeba (ile-eng)": 87.88, "Tatoeba (ina-eng)": 93.93, "Tatoeba (ind-eng)": 92.98, "Tatoeba (isl-eng)": 94.32, "Tatoeba (ita-eng)": 94.32, "Tatoeba (jav-eng)": 9.95, "Tatoeba (jpn-eng)": 93.78, "Tatoeba (kab-eng)": 65.88, "Tatoeba (kat-eng)": 81.16, "Tatoeba (kaz-eng)": 53.3, "Tatoeba (khm-eng)": 74.19, "Tatoeba (kor-eng)": 87.97, "Tatoeba (kur-eng)": 19.09, "Tatoeba (kzj-eng)": 4.46, "Tatoeba (lat-eng)": 64.81, "Tatoeba (lfn-eng)": 63.39, "Tatoeba (lit-eng)": 96.2, "Tatoeba (lvs-eng)": 95.33, "Tatoeba (mal-eng)": 98.16, "Tatoeba (mar-eng)": 92.93, "Tatoeba (max-eng)": 36.96, "Tatoeba (mhr-eng)": 6.86, "Tatoeba (mkd-eng)": 93.63, "Tatoeba (mon-eng)": 3.42, "Tatoeba (nds-eng)": 77.13, "Tatoeba (nld-eng)": 95.35, "Tatoeba (nno-eng)": 72.75, "Tatoeba (nob-eng)": 95.77, "Tatoeba (nov-eng)": 60.02, "Tatoeba (oci-eng)": 58.13, "Tatoeba (orv-eng)": 23.24, "Tatoeba (pam-eng)": 3.24, "Tatoeba (pes-eng)": 93.13, "Tatoeba (pms-eng)": 36.23, "Tatoeba (pol-eng)": 97.32, "Tatoeba (por-eng)": 94.54, "Tatoeba (ron-eng)": 96.52, "Tatoeba (rus-eng)": 92.58, "Tatoeba (slk-eng)": 95.82, "Tatoeba (slv-eng)": 95.4, "Tatoeba (spa-eng)": 97.33, "Tatoeba (sqi-eng)": 97.22, "Tatoeba (srp-eng)": 93.64, "Tatoeba (swe-eng)": 95.31, "Tatoeba (swg-eng)": 33.1, "Tatoeba (swh-eng)": 55.66, "Tatoeba (tam-eng)": 87.32, "Tatoeba (tat-eng)": 34.74, "Tatoeba (tel-eng)": 96.72, "Tatoeba (tgl-eng)": 63.19, "Tatoeba (tha-eng)": 96.38, "Tatoeba (tuk-eng)": 16.35, "Tatoeba (tur-eng)": 98.03, "Tatoeba (tzl-eng)": 36.56, "Tatoeba (uig-eng)": 56.49, "Tatoeba (ukr-eng)": 93.52, "Tatoeba (urd-eng)": 84.23, "Tatoeba (uzb-eng)": 23.2, "Tatoeba (vie-eng)": 96.73, "Tatoeba (war-eng)": 8.25, "Tatoeba (wuu-eng)": 75.09, "Tatoeba (xho-eng)": 4.68, "Tatoeba (yid-eng)": 2.49, "Tatoeba (yue-eng)": 87.75, "Tatoeba (zsm-eng)": 95.41 } ] }, "Classification": { "accuracy": [ { "Model": "LASER2", "AmazonCounterfactualClassification (de)": 67.82, "AmazonCounterfactualClassification (en)": 76.84, "AmazonCounterfactualClassification (en-ext)": 76.17, "AmazonCounterfactualClassification (ja)": 68.76, "AmazonPolarityClassification": 61.01, "AmazonReviewsClassification (de)": 31.07, 
"AmazonReviewsClassification (en)": 28.71, "AmazonReviewsClassification (es)": 32.72, "AmazonReviewsClassification (fr)": 31.12, "AmazonReviewsClassification (ja)": 28.94, "AmazonReviewsClassification (zh)": 30.89, "Banking77Classification": 57.76, "EmotionClassification": 24.83, "ImdbClassification": 57.58, "MTOPDomainClassification (de)": 74.08, "MTOPDomainClassification (en)": 75.36, "MTOPDomainClassification (es)": 73.47, "MTOPDomainClassification (fr)": 72.26, "MTOPDomainClassification (hi)": 72.95, "MTOPDomainClassification (th)": 72.68, "MTOPIntentClassification (de)": 51.62, "MTOPIntentClassification (en)": 49.47, "MTOPIntentClassification (es)": 52.75, "MTOPIntentClassification (fr)": 50.12, "MTOPIntentClassification (hi)": 45.55, "MTOPIntentClassification (th)": 50.07, "MasakhaNEWSClassification (fra)": 65.9, "MassiveIntentClassification (af)": 38.01, "MassiveIntentClassification (am)": 12.7, "MassiveIntentClassification (ar)": 37.16, "MassiveIntentClassification (az)": 19.98, "MassiveIntentClassification (bn)": 42.51, "MassiveIntentClassification (cy)": 17.33, "MassiveIntentClassification (da)": 45.61, "MassiveIntentClassification (de)": 44.79, "MassiveIntentClassification (el)": 46.71, "MassiveIntentClassification (en)": 47.91, "MassiveIntentClassification (es)": 45.44, "MassiveIntentClassification (fa)": 45.01, "MassiveIntentClassification (fi)": 45.94, "MassiveIntentClassification (fr)": 46.13, "MassiveIntentClassification (he)": 42.55, "MassiveIntentClassification (hi)": 40.2, "MassiveIntentClassification (hu)": 42.77, "MassiveIntentClassification (hy)": 28.07, "MassiveIntentClassification (id)": 45.81, "MassiveIntentClassification (is)": 39.86, "MassiveIntentClassification (it)": 48.25, "MassiveIntentClassification (ja)": 45.3, "MassiveIntentClassification (jv)": 24.3, "MassiveIntentClassification (ka)": 22.7, "MassiveIntentClassification (km)": 22.48, "MassiveIntentClassification (kn)": 4.32, "MassiveIntentClassification (ko)": 44.26, "MassiveIntentClassification (lv)": 39.75, "MassiveIntentClassification (ml)": 41.33, "MassiveIntentClassification (mn)": 16.2, "MassiveIntentClassification (ms)": 43.23, "MassiveIntentClassification (my)": 25.37, "MassiveIntentClassification (nb)": 37.74, "MassiveIntentClassification (nl)": 45.0, "MassiveIntentClassification (pl)": 44.99, "MassiveIntentClassification (pt)": 48.55, "MassiveIntentClassification (ro)": 44.3, "MassiveIntentClassification (ru)": 44.29, "MassiveIntentClassification (sl)": 44.72, "MassiveIntentClassification (sq)": 46.12, "MassiveIntentClassification (sv)": 45.95, "MassiveIntentClassification (sw)": 31.89, "MassiveIntentClassification (ta)": 29.63, "MassiveIntentClassification (te)": 36.03, "MassiveIntentClassification (th)": 43.39, "MassiveIntentClassification (tl)": 29.73, "MassiveIntentClassification (tr)": 43.93, "MassiveIntentClassification (ur)": 26.11, "MassiveIntentClassification (vi)": 44.33, "MassiveIntentClassification (zh-CN)": 40.62, "MassiveIntentClassification (zh-TW)": 32.93, "MassiveScenarioClassification (af)": 47.1, "MassiveScenarioClassification (am)": 17.7, "MassiveScenarioClassification (ar)": 45.21, "MassiveScenarioClassification (az)": 28.21, "MassiveScenarioClassification (bn)": 50.52, "MassiveScenarioClassification (cy)": 22.58, "MassiveScenarioClassification (da)": 54.87, "MassiveScenarioClassification (de)": 54.34, "MassiveScenarioClassification (el)": 55.47, "MassiveScenarioClassification (en)": 55.92, "MassiveScenarioClassification (es)": 52.77, "MassiveScenarioClassification (fa)": 
52.5, "MassiveScenarioClassification (fi)": 52.63, "MassiveScenarioClassification (fr)": 54.32, "MassiveScenarioClassification (he)": 52.41, "MassiveScenarioClassification (hi)": 47.37, "MassiveScenarioClassification (hu)": 53.43, "MassiveScenarioClassification (hy)": 33.57, "MassiveScenarioClassification (id)": 54.38, "MassiveScenarioClassification (is)": 49.78, "MassiveScenarioClassification (it)": 54.84, "MassiveScenarioClassification (ja)": 54.12, "MassiveScenarioClassification (jv)": 32.71, "MassiveScenarioClassification (ka)": 26.92, "MassiveScenarioClassification (km)": 27.23, "MassiveScenarioClassification (kn)": 10.06, "MassiveScenarioClassification (ko)": 52.01, "MassiveScenarioClassification (lv)": 44.82, "MassiveScenarioClassification (ml)": 49.1, "MassiveScenarioClassification (mn)": 21.51, "MassiveScenarioClassification (ms)": 53.6, "MassiveScenarioClassification (my)": 29.72, "MassiveScenarioClassification (nb)": 43.9, "MassiveScenarioClassification (nl)": 53.33, "MassiveScenarioClassification (pl)": 52.92, "MassiveScenarioClassification (pt)": 53.41, "MassiveScenarioClassification (ro)": 50.48, "MassiveScenarioClassification (ru)": 51.84, "MassiveScenarioClassification (sl)": 51.29, "MassiveScenarioClassification (sq)": 55.65, "MassiveScenarioClassification (sv)": 54.64, "MassiveScenarioClassification (sw)": 42.04, "MassiveScenarioClassification (ta)": 36.72, "MassiveScenarioClassification (te)": 42.08, "MassiveScenarioClassification (th)": 52.15, "MassiveScenarioClassification (tl)": 37.34, "MassiveScenarioClassification (tr)": 52.56, "MassiveScenarioClassification (ur)": 32.6, "MassiveScenarioClassification (vi)": 50.97, "MassiveScenarioClassification (zh-CN)": 50.22, "MassiveScenarioClassification (zh-TW)": 42.32, "ToxicConversationsClassification": 54.05, "TweetSentimentExtractionClassification": 48.73 } ] }, "Clustering": { "v_measure": [ { "Model": "LASER2", "AlloProfClusteringP2P": 48.45, "AlloProfClusteringS2S": 25.81, "ArxivClusteringP2P": 17.77, "ArxivClusteringS2S": 12.39, "BiorxivClusteringP2P": 12.4, "BiorxivClusteringS2S": 8.83, "HALClusteringS2S": 11.52, "MLSUMClusteringP2P": 34.53, "MLSUMClusteringS2S": 27.35, "MasakhaNEWSClusteringP2P (fra)": 32.04, "MasakhaNEWSClusteringS2S (fra)": 29.77, "MedrxivClusteringP2P": 17.91, "MedrxivClusteringS2S": 16.63, "RedditClustering": 9.96, "RedditClusteringP2P": 26.42, "StackExchangeClustering": 15.79, "StackExchangeClusteringP2P": 18.63, "TwentyNewsgroupsClustering": 11.38 } ] }, "PairClassification": { "ap": [ { "Model": "LASER2", "OpusparcusPC (fr)": 93.77, "PawsXPairClassification (fr)": 69.53, "SprintDuplicateQuestions": 65.54, "TwitterSemEval2015": 59.57, "TwitterURLCorpus": 81.47 } ] }, "Reranking": { "map": [ { "Model": "LASER2", "AlloprofReranking": 35.29, "AskUbuntuDupQuestions": 48.99, "MindSmallReranking": 24.79, "SciDocsRR": 54.99, "StackOverflowDupQuestions": 36.98, "SyntecReranking": 55.93 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "LASER2", "AlloprofRetrieval": 3.1, "ArguAna": 12.86, "BSARDRetrieval": 0.36, "CQADupstackRetrieval": 4.12, "ClimateFEVER": 0.36, "DBPedia": 1.53, "FEVER": 0.77, "FiQA2018": 1.73, "HotpotQA": 5.5, "MSMARCO": 1.09, "MintakaRetrieval (fr)": 6.31, "NFCorpus": 2.44, "NQ": 0.64, "QuoraRetrieval": 71.14, "SCIDOCS": 0.78, "SciFact": 4.04, "SyntecRetrieval": 28.58, "TRECCOVID": 10.97, "Touche2020": 1.06, "XPQARetrieval (fr)": 42.59 } ] }, "STS": { "spearman": [ { "Model": "LASER2", "BIOSSES": 62.01, "SICK-R": 62.86, "SICKFr": 64.95, "STS12": 62.6, "STS13": 59.62, "STS14": 57.03, 
"STS15": 71.57, "STS16": 70.75, "STS17 (ar-ar)": 67.47, "STS17 (en-ar)": 65.05, "STS17 (en-de)": 66.66, "STS17 (en-en)": 76.73, "STS17 (en-tr)": 70.05, "STS17 (es-en)": 55.3, "STS17 (es-es)": 79.67, "STS17 (fr-en)": 70.82, "STS17 (it-en)": 70.98, "STS17 (ko-ko)": 70.52, "STS17 (nl-en)": 68.12, "STS22 (ar)": 42.57, "STS22 (de)": 25.69, "STS22 (de-en)": 32.35, "STS22 (de-fr)": 37.41, "STS22 (de-pl)": 15.67, "STS22 (en)": 39.76, "STS22 (es)": 54.92, "STS22 (es-en)": 54.34, "STS22 (es-it)": 42.21, "STS22 (fr)": 58.61, "STS22 (fr-pl)": 39.44, "STS22 (it)": 60.31, "STS22 (pl)": 18.34, "STS22 (pl-en)": 53.63, "STS22 (ru)": 39.24, "STS22 (tr)": 36.97, "STS22 (zh)": 49.41, "STS22 (zh-en)": 46.19, "STSBenchmark": 69.77, "STSBenchmarkMultilingualSTS (fr)": 69.82 } ] }, "Summarization": { "spearman": [ { "Model": "LASER2", "SummEval": 26.8, "SummEvalFr": 31.56 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "LASER2" } ] } }, "udever-bloom-1b1": { "BitextMining": { "f1": [ { "Model": "udever-bloom-1b1" } ] }, "Classification": { "accuracy": [ { "Model": "udever-bloom-1b1", "AmazonReviewsClassification (fr)": 35.12, "MTOPDomainClassification (fr)": 69.24, "MTOPIntentClassification (fr)": 51.25, "MasakhaNEWSClassification (fra)": 80.83, "MassiveIntentClassification (fr)": 43.21, "MassiveScenarioClassification (fr)": 49.78 } ] }, "Clustering": { "v_measure": [ { "Model": "udever-bloom-1b1", "AlloProfClusteringP2P": 62.22, "AlloProfClusteringS2S": 27.06, "HALClusteringS2S": 13.86, "MLSUMClusteringP2P": 44.11, "MLSUMClusteringS2S": 30.47, "MasakhaNEWSClusteringP2P (fra)": 40.2, "MasakhaNEWSClusteringS2S (fra)": 27.35 } ] }, "PairClassification": { "ap": [ { "Model": "udever-bloom-1b1", "OpusparcusPC (fr)": 85.54, "PawsXPairClassification (fr)": 61.99 } ] }, "Reranking": { "map": [ { "Model": "udever-bloom-1b1", "AlloprofReranking": 39.13, "SyntecReranking": 62.58 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "udever-bloom-1b1", "AlloprofRetrieval": 12.37, "BSARDRetrieval": 0.0, "MintakaRetrieval (fr)": 2.78, "SyntecRetrieval": 40.57, "XPQARetrieval (fr)": 33.82 } ] }, "STS": { "spearman": [ { "Model": "udever-bloom-1b1", "SICKFr": 59.94, "STS22 (fr)": 77.1, "STSBenchmarkMultilingualSTS (fr)": 49.97 } ] }, "Summarization": { "spearman": [ { "Model": "udever-bloom-1b1", "SummEvalFr": 29.48 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "udever-bloom-1b1" } ] } }, "deberta-v1-base": { "BitextMining": { "f1": [ { "Model": "deberta-v1-base", "Tatoeba (rus-Cyrl_eng-Latn)": 13.21 } ] }, "Classification": { "accuracy": [ { "Model": "deberta-v1-base", "GeoreviewClassification (rus-Cyrl)": 40.19, "HeadlineClassification (rus-Cyrl)": 78.75, "InappropriatenessClassification (rus-Cyrl)": 61.33, "KinopoiskClassification (rus-Cyrl)": 48.78, "MassiveIntentClassification (rus-Cyrl)": 61.32, "MassiveScenarioClassification (rus-Cyrl)": 64.71, "RuReviewsClassification (rus-Cyrl)": 55.66, "RuSciBenchGRNTIClassification (rus-Cyrl)": 53.53, "RuSciBenchOECDClassification (rus-Cyrl)": 41.34 } ] }, "Clustering": { "v_measure": [ { "Model": "deberta-v1-base", "GeoreviewClusteringP2P (rus-Cyrl)": 58.79, "MLSUMClusteringP2P (rus-Cyrl)": 47.33, "MLSUMClusteringS2S (rus-Cyrl)": 44.6, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 36.66, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 33.31 } ] }, "PairClassification": { "ap": [ { "Model": "deberta-v1-base", "OpusparcusPC (rus-Cyrl)": 83.31, "TERRa (rus-Cyrl)": 53.78 } ] }, "Reranking": { "map": [ { "Model": "deberta-v1-base", "RuBQReranking (rus-Cyrl)": 34.01 } ] }, "Retrieval": 
{ "ndcg_at_10": [ { "Model": "deberta-v1-base", "RiaNewsRetrieval (rus-Cyrl)": 4.84, "RuBQRetrieval (rus-Cyrl)": 10.15 } ] }, "STS": { "spearman": [ { "Model": "deberta-v1-base", "RUParaPhraserSTS (rus-Cyrl)": 54.03, "RuSTSBenchmarkSTS (rus-Cyrl)": 58.47, "STS22 (rus-Cyrl)": 47.67, "STSBenchmarkMultilingualSTS (rus-Cyrl)": 58.45 } ] }, "Summarization": { "spearman": [ { "Model": "deberta-v1-base" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "deberta-v1-base" } ] } }, "gte-Qwen2-7B-instruct": { "BitextMining": { "f1": [ { "Model": "gte-Qwen2-7B-instruct" } ] }, "Classification": { "accuracy": [ { "Model": "gte-Qwen2-7B-instruct" } ] }, "Clustering": { "v_measure": [ { "Model": "gte-Qwen2-7B-instruct" } ] }, "PairClassification": { "ap": [ { "Model": "gte-Qwen2-7B-instruct" } ] }, "Reranking": { "map": [ { "Model": "gte-Qwen2-7B-instruct" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "gte-Qwen2-7B-instruct", "BrightRetrieval (earth_science)": 40.66, "BrightRetrieval (sustainable_living)": 20.82, "BrightRetrieval (theoremqa_theorems)": 28.15, "BrightRetrieval (aops)": 15.1, "BrightRetrieval (economics)": 16.18, "BrightRetrieval (pony)": 1.25, "BrightRetrieval (stackoverflow)": 13.95, "BrightRetrieval (leetcode)": 31.07, "BrightRetrieval (biology)": 32.09, "BrightRetrieval (theoremqa_questions)": 29.9, "BrightRetrieval (robotics)": 12.82, "BrightRetrieval (psychology)": 26.58 } ] }, "STS": { "spearman": [ { "Model": "gte-Qwen2-7B-instruct" } ] }, "Summarization": { "spearman": [ { "Model": "gte-Qwen2-7B-instruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "gte-Qwen2-7B-instruct" } ] } }, "contriever-base-msmarco": { "BitextMining": { "f1": [ { "Model": "contriever-base-msmarco" } ] }, "Classification": { "accuracy": [ { "Model": "contriever-base-msmarco", "AmazonCounterfactualClassification (en)": 72.19, "AmazonPolarityClassification": 68.63, "AmazonReviewsClassification (en)": 37.42, "Banking77Classification": 80.02, "EmotionClassification": 44.77, "ImdbClassification": 67.04, "MTOPDomainClassification (en)": 93.18, "MTOPIntentClassification (en)": 69.31, "MassiveIntentClassification (en)": 67.78, "MassiveScenarioClassification (en)": 76.0, "ToxicConversationsClassification": 67.77, "TweetSentimentExtractionClassification": 56.1 } ] }, "Clustering": { "v_measure": [ { "Model": "contriever-base-msmarco", "ArxivClusteringP2P": 42.61, "ArxivClusteringS2S": 32.32, "BiorxivClusteringP2P": 34.97, "BiorxivClusteringS2S": 29.08, "MedrxivClusteringP2P": 31.19, "MedrxivClusteringS2S": 27.27, "RedditClustering": 54.89, "RedditClusteringP2P": 57.58, "StackExchangeClustering": 63.15, "StackExchangeClusteringP2P": 32.25, "TwentyNewsgroupsClustering": 46.82 } ] }, "PairClassification": { "ap": [ { "Model": "contriever-base-msmarco", "SprintDuplicateQuestions": 95.55, "TwitterSemEval2015": 66.85, "TwitterURLCorpus": 85.21 } ] }, "Reranking": { "map": [ { "Model": "contriever-base-msmarco", "AskUbuntuDupQuestions": 56.69, "MindSmallReranking": 31.58, "SciDocsRR": 76.51, "StackOverflowDupQuestions": 47.78 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "contriever-base-msmarco", "ArguAna": 48.32, "CQADupstackRetrieval": 33.67, "ClimateFEVER": 24.79, "DBPedia": 38.1, "FEVER": 59.29, "FiQA2018": 27.42, "HotpotQA": 56.81, "MSMARCO": 36.77, "NFCorpus": 31.32, "NQ": 41.83, "QuoraRetrieval": 86.72, "SCIDOCS": 17.12, "SciFact": 65.51, "TRECCOVID": 44.77, "Touche2020": 15.79 } ] }, "STS": { "spearman": [ { "Model": "contriever-base-msmarco", "BIOSSES": 83.32, "SICK-R": 70.2, "STS12": 64.34, 
"STS13": 80.03, "STS14": 74.51, "STS15": 83.3, "STS16": 79.67, "STS17 (en-en)": 86.32, "STS22 (en)": 64.64, "STSBenchmark": 78.81 } ] }, "Summarization": { "spearman": [ { "Model": "contriever-base-msmarco", "SummEval": 30.36 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "contriever-base-msmarco", "Core17InstructionRetrieval": -2.48, "News21InstructionRetrieval": -2.83, "Robust04InstructionRetrieval": -6.12 } ] } }, "unsup-simcse-bert-base-uncased": { "BitextMining": { "f1": [ { "Model": "unsup-simcse-bert-base-uncased" } ] }, "Classification": { "accuracy": [ { "Model": "unsup-simcse-bert-base-uncased", "AmazonCounterfactualClassification (en)": 67.09, "AmazonPolarityClassification": 74.48, "AmazonReviewsClassification (en)": 33.85, "Banking77Classification": 73.55, "EmotionClassification": 42.22, "ImdbClassification": 69.63, "MTOPDomainClassification (en)": 81.71, "MTOPIntentClassification (en)": 59.23, "MassiveIntentClassification (en)": 59.84, "MassiveScenarioClassification (en)": 66.25, "ToxicConversationsClassification": 68.82, "TweetSentimentExtractionClassification": 53.36 } ] }, "Clustering": { "v_measure": [ { "Model": "unsup-simcse-bert-base-uncased", "ArxivClusteringP2P": 32.61, "ArxivClusteringS2S": 24.68, "BiorxivClusteringP2P": 24.9, "BiorxivClusteringS2S": 19.55, "MedrxivClusteringP2P": 23.6, "MedrxivClusteringS2S": 21.97, "RedditClustering": 32.18, "RedditClusteringP2P": 45.14, "StackExchangeClustering": 43.07, "StackExchangeClusteringP2P": 28.5, "TwentyNewsgroupsClustering": 23.21 } ] }, "PairClassification": { "ap": [ { "Model": "unsup-simcse-bert-base-uncased", "SprintDuplicateQuestions": 69.41, "TwitterSemEval2015": 60.21, "TwitterURLCorpus": 81.37 } ] }, "Reranking": { "map": [ { "Model": "unsup-simcse-bert-base-uncased", "AskUbuntuDupQuestions": 51.57, "MindSmallReranking": 28.62, "SciDocsRR": 66.33, "StackOverflowDupQuestions": 39.35 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "unsup-simcse-bert-base-uncased", "ArguAna": 38.34, "CQADupstackRetrieval": 13.22, "ClimateFEVER": 11.8, "DBPedia": 15.04, "FEVER": 21.06, "FiQA2018": 9.84, "HotpotQA": 19.75, "MSMARCO": 9.35, "NFCorpus": 9.88, "NQ": 11.69, "QuoraRetrieval": 78.03, "SCIDOCS": 5.5, "SciFact": 25.72, "TRECCOVID": 26.2, "Touche2020": 8.9 } ] }, "STS": { "spearman": [ { "Model": "unsup-simcse-bert-base-uncased", "BIOSSES": 72.31, "SICK-R": 72.24, "STS12": 66.05, "STS13": 81.49, "STS14": 73.61, "STS15": 79.72, "STS16": 78.12, "STS17 (en-en)": 83.58, "STS22 (en)": 59.65, "STSBenchmark": 76.52 } ] }, "Summarization": { "spearman": [ { "Model": "unsup-simcse-bert-base-uncased", "SummEval": 31.15 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "unsup-simcse-bert-base-uncased" } ] } }, "SFR-Embedding-Mistral": { "BitextMining": { "f1": [ { "Model": "SFR-Embedding-Mistral" } ] }, "Classification": { "accuracy": [ { "Model": "SFR-Embedding-Mistral" } ] }, "Clustering": { "v_measure": [ { "Model": "SFR-Embedding-Mistral" } ] }, "PairClassification": { "ap": [ { "Model": "SFR-Embedding-Mistral" } ] }, "Reranking": { "map": [ { "Model": "SFR-Embedding-Mistral" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "SFR-Embedding-Mistral", "BrightRetrieval (sustainable_living)": 19.79, "BrightRetrieval (economics)": 17.84, "BrightRetrieval (theoremqa_theorems)": 24.05, "BrightRetrieval (aops)": 7.43, "BrightRetrieval (theoremqa_questions)": 23.05, "BrightRetrieval (psychology)": 18.97, "BrightRetrieval (stackoverflow)": 12.72, "BrightRetrieval (pony)": 1.97, "BrightRetrieval (leetcode)": 27.35, "BrightRetrieval 
(biology)": 19.49, "BrightRetrieval (earth_science)": 26.63, "BrightRetrieval (robotics)": 16.7 } ] }, "STS": { "spearman": [ { "Model": "SFR-Embedding-Mistral" } ] }, "Summarization": { "spearman": [ { "Model": "SFR-Embedding-Mistral" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "SFR-Embedding-Mistral" } ] } }, "text-embedding-3-small": { "BitextMining": { "f1": [ { "Model": "text-embedding-3-small" } ] }, "Classification": { "accuracy": [ { "Model": "text-embedding-3-small", "AmazonCounterfactualClassification (en)": 76.42, "AmazonPolarityClassification": 90.84, "AmazonReviewsClassification (en)": 45.73, "Banking77Classification": 83.01, "EmotionClassification": 50.63, "ImdbClassification": 83.66, "MTOPDomainClassification (en)": 93.91, "MTOPIntentClassification (en)": 70.98, "MassiveIntentClassification (en)": 72.86, "MassiveScenarioClassification (en)": 76.84, "ToxicConversationsClassification": 71.91, "TweetSentimentExtractionClassification": 61.72 } ] }, "Clustering": { "v_measure": [ { "Model": "text-embedding-3-small", "ArxivClusteringP2P": 46.57, "ArxivClusteringS2S": 39.35, "BiorxivClusteringP2P": 37.77, "BiorxivClusteringS2S": 34.68, "MedrxivClusteringP2P": 32.77, "MedrxivClusteringS2S": 31.85, "RedditClustering": 64.09, "RedditClusteringP2P": 65.12, "StackExchangeClustering": 72.05, "StackExchangeClusteringP2P": 34.04, "TwentyNewsgroupsClustering": 54.81 } ] }, "PairClassification": { "ap": [ { "Model": "text-embedding-3-small", "OpusparcusPC (fr)": 94.45, "SprintDuplicateQuestions": 94.58, "TwitterSemEval2015": 73.33, "TwitterURLCorpus": 87.21 } ] }, "Reranking": { "map": [ { "Model": "text-embedding-3-small", "AskUbuntuDupQuestions": 62.18, "MindSmallReranking": 29.93, "SciDocsRR": 83.25, "StackOverflowDupQuestions": 51.53 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "text-embedding-3-small", "ARCChallenge": 14.63, "AlphaNLI": 30.61, "ArguAna": 55.49, "CQADupstackRetrieval": 42.58, "ClimateFEVER": 26.86, "DBPedia": 39.97, "FEVER": 79.42, "FiQA2018": 44.91, "HellaSwag": 30.94, "HotpotQA": 63.63, "MSMARCO": 37.02, "NFCorpus": 38.33, "NQ": 52.86, "PIQA": 33.69, "Quail": 6.11, "QuoraRetrieval": 88.83, "RARbCode": 72.03, "RARbMath": 71.07, "SCIDOCS": 20.8, "SIQA": 3.03, "SciFact": 73.37, "SpartQA": 6.63, "TRECCOVID": 77.9, "TempReasonL1": 2.35, "TempReasonL2Fact": 25.68, "TempReasonL2Pure": 2.76, "TempReasonL3Fact": 22.09, "TempReasonL3Pure": 9.79, "Touche2020": 24.28, "WinoGrande": 31.53 } ] }, "STS": { "spearman": [ { "Model": "text-embedding-3-small", "BIOSSES": 88.72, "SICK-R": 76.73, "STS12": 73.09, "STS13": 84.92, "STS14": 79.81, "STS15": 88.01, "STS16": 84.41, "STS17 (en-en)": 90.94, "STS22 (en)": 64.96, "STSBenchmark": 84.24 } ] }, "Summarization": { "spearman": [ { "Model": "text-embedding-3-small", "SummEval": 31.12 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "text-embedding-3-small" } ] } }, "Cohere-embed-english-v3.0-instruct": { "BitextMining": { "f1": [ { "Model": "Cohere-embed-english-v3.0-instruct" } ] }, "Classification": { "accuracy": [ { "Model": "Cohere-embed-english-v3.0-instruct" } ] }, "Clustering": { "v_measure": [ { "Model": "Cohere-embed-english-v3.0-instruct" } ] }, "PairClassification": { "ap": [ { "Model": "Cohere-embed-english-v3.0-instruct" } ] }, "Reranking": { "map": [ { "Model": "Cohere-embed-english-v3.0-instruct" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "Cohere-embed-english-v3.0-instruct", "ARCChallenge": 10.1, "AlphaNLI": 18.75, "HellaSwag": 29.02, "PIQA": 27.89, "Quail": 7.77, "RARbCode": 56.56, "RARbMath": 
72.05, "SIQA": 5.03, "SpartQA": 3.33, "TempReasonL1": 1.43, "TempReasonL2Fact": 40.46, "TempReasonL2Pure": 2.39, "TempReasonL3Fact": 33.87, "TempReasonL3Pure": 7.52, "WinoGrande": 65.02 } ] }, "STS": { "spearman": [ { "Model": "Cohere-embed-english-v3.0-instruct" } ] }, "Summarization": { "spearman": [ { "Model": "Cohere-embed-english-v3.0-instruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "Cohere-embed-english-v3.0-instruct" } ] } }, "voyage-2": { "BitextMining": { "f1": [ { "Model": "voyage-2" } ] }, "Classification": { "accuracy": [ { "Model": "voyage-2", "AmazonReviewsClassification (fr)": 37.26, "MTOPDomainClassification (fr)": 79.79, "MTOPIntentClassification (fr)": 45.62, "MasakhaNEWSClassification (fra)": 80.19, "MassiveIntentClassification (fr)": 53.7, "MassiveScenarioClassification (fr)": 62.46 } ] }, "Clustering": { "v_measure": [ { "Model": "voyage-2", "AlloProfClusteringP2P": 57.96, "AlloProfClusteringS2S": 41.65, "HALClusteringS2S": 24.84, "MLSUMClusteringP2P": 45.08, "MLSUMClusteringS2S": 38.77, "MasakhaNEWSClusteringP2P (fra)": 48.54, "MasakhaNEWSClusteringS2S (fra)": 36.33 } ] }, "PairClassification": { "ap": [ { "Model": "voyage-2", "OpusparcusPC (fr)": 89.76, "PawsXPairClassification (fr)": 58.96 } ] }, "Reranking": { "map": [ { "Model": "voyage-2", "AlloprofReranking": 63.54, "SyntecReranking": 82.65 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "voyage-2", "AlloprofRetrieval": 45.5, "BSARDRetrieval": 0.15, "MintakaRetrieval (fr)": 15.51, "SyntecRetrieval": 75.83, "XPQARetrieval (fr)": 67.07 } ] }, "STS": { "spearman": [ { "Model": "voyage-2", "SICKFr": 68.51, "STS22 (fr)": 70.51, "STSBenchmarkMultilingualSTS (fr)": 76.43 } ] }, "Summarization": { "spearman": [ { "Model": "voyage-2", "SummEvalFr": 30.88 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "voyage-2" } ] } }, "e5-base": { "BitextMining": { "f1": [ { "Model": "e5-base", "BornholmBitextMining": 40.09 } ] }, "Classification": { "accuracy": [ { "Model": "e5-base", "AngryTweetsClassification": 45.06, "DKHateClassification": 58.51, "DanishPoliticalCommentsClassification": 28.43, "LccSentimentClassification": 37.47, "MassiveIntentClassification (da)": 44.25, "MassiveIntentClassification (nb)": 41.57, "MassiveIntentClassification (sv)": 41.34, "MassiveScenarioClassification (da)": 52.99, "MassiveScenarioClassification (nb)": 50.33, "MassiveScenarioClassification (sv)": 50.0, "NoRecClassification": 42.0, "NordicLangClassification": 59.34, "NorwegianParliament": 57.42, "ScalaDaClassification": 50.08, "ScalaNbClassification": 50.18 } ] }, "Clustering": { "v_measure": [ { "Model": "e5-base" } ] }, "PairClassification": { "ap": [ { "Model": "e5-base" } ] }, "Reranking": { "map": [ { "Model": "e5-base" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "e5-base", "LEMBNarrativeQARetrieval": 25.31, "LEMBNeedleRetrieval": 28.5, "LEMBPasskeyRetrieval": 33.25, "LEMBQMSumRetrieval": 23.83, "LEMBSummScreenFDRetrieval": 74.67, "LEMBWikimQARetrieval": 55.85 } ] }, "STS": { "spearman": [ { "Model": "e5-base" } ] }, "Summarization": { "spearman": [ { "Model": "e5-base" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "e5-base" } ] } }, "elser-v2": { "BitextMining": { "f1": [ { "Model": "elser-v2" } ] }, "Classification": { "accuracy": [ { "Model": "elser-v2", "AmazonCounterfactualClassification (en)": 74.16, "AmazonPolarityClassification": 61.91, "AmazonReviewsClassification (en)": 32.06, "Banking77Classification": 82.05, "EmotionClassification": 46.65, "ImdbClassification": 65.02, 
"MTOPDomainClassification (en)": 93.17, "MTOPIntentClassification (en)": 71.1, "MassiveIntentClassification (en)": 68.48, "MassiveScenarioClassification (en)": 74.98, "ToxicConversationsClassification": 68.15, "TweetSentimentExtractionClassification": 53.57 } ] }, "Clustering": { "v_measure": [ { "Model": "elser-v2", "ArxivClusteringP2P": 35.27, "ArxivClusteringS2S": 23.18, "BiorxivClusteringP2P": 31.13, "BiorxivClusteringS2S": 26.78, "MedrxivClusteringP2P": 24.65, "MedrxivClusteringS2S": 24.21, "RedditClustering": 38.74, "RedditClusteringP2P": 51.92, "StackExchangeClustering": 42.7, "StackExchangeClusteringP2P": 28.7, "TwentyNewsgroupsClustering": 27.82 } ] }, "PairClassification": { "ap": [ { "Model": "elser-v2", "SprintDuplicateQuestions": 94.53, "TwitterSemEval2015": 64.41, "TwitterURLCorpus": 85.01 } ] }, "Reranking": { "map": [ { "Model": "elser-v2", "AskUbuntuDupQuestions": 58.31, "MindSmallReranking": 30.75, "SciDocsRR": 75.62, "StackOverflowDupQuestions": 48.4 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "elser-v2", "ArguAna": 55.98, "CQADupstackRetrieval": 34.27, "ClimateFEVER": 27.08, "DBPedia": 42.7, "FEVER": 78.55, "FiQA2018": 41.57, "HotpotQA": 67.01, "MSMARCO": 38.9, "NFCorpus": 36.66, "NQ": 55.84, "QuoraRetrieval": 84.69, "SCIDOCS": 16.24, "SciFact": 71.8, "TRECCOVID": 72.72, "Touche2020": 26.27 } ] }, "STS": { "spearman": [ { "Model": "elser-v2", "BIOSSES": 83.79, "SICK-R": 68.78, "STS12": 64.81, "STS13": 80.1, "STS14": 74.96, "STS15": 83.7, "STS16": 80.55, "STS17 (en-en)": 85.74, "STS22 (en)": 67.5, "STSBenchmark": 79.54 } ] }, "Summarization": { "spearman": [ { "Model": "elser-v2", "SummEval": 31.03 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "elser-v2" } ] } }, "gtr-t5-xxl": { "BitextMining": { "f1": [ { "Model": "gtr-t5-xxl" } ] }, "Classification": { "accuracy": [ { "Model": "gtr-t5-xxl", "AmazonCounterfactualClassification (en)": 67.3, "AmazonPolarityClassification": 75.05, "AmazonReviewsClassification (en)": 37.3, "Banking77Classification": 82.32, "EmotionClassification": 43.19, "ImdbClassification": 70.8, "MTOPDomainClassification (en)": 93.84, "MTOPIntentClassification (en)": 67.71, "MassiveIntentClassification (en)": 70.61, "MassiveScenarioClassification (en)": 77.77, "ToxicConversationsClassification": 68.48, "TweetSentimentExtractionClassification": 54.54 } ] }, "Clustering": { "v_measure": [ { "Model": "gtr-t5-xxl", "ArxivClusteringP2P": 37.9, "ArxivClusteringS2S": 32.39, "BiorxivClusteringP2P": 30.48, "BiorxivClusteringS2S": 27.5, "MedrxivClusteringP2P": 29.12, "MedrxivClusteringS2S": 27.56, "RedditClustering": 64.13, "RedditClusteringP2P": 62.84, "StackExchangeClustering": 71.43, "StackExchangeClusteringP2P": 32.85, "TwentyNewsgroupsClustering": 50.44 } ] }, "PairClassification": { "ap": [ { "Model": "gtr-t5-xxl", "SprintDuplicateQuestions": 95.68, "TwitterSemEval2015": 77.54, "TwitterURLCorpus": 85.13 } ] }, "Reranking": { "map": [ { "Model": "gtr-t5-xxl", "AskUbuntuDupQuestions": 63.23, "MindSmallReranking": 31.93, "SciDocsRR": 77.96, "StackOverflowDupQuestions": 53.5 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "gtr-t5-xxl", "ArguAna": 53.77, "CQADupstackRetrieval": 38.56, "ClimateFEVER": 27.21, "DBPedia": 41.28, "FEVER": 74.08, "FiQA2018": 46.78, "HotpotQA": 59.67, "MSMARCO": 44.05, "NFCorpus": 34.18, "NQ": 57.24, "QuoraRetrieval": 89.09, "SCIDOCS": 15.88, "SciFact": 66.77, "TRECCOVID": 51.9, "Touche2020": 26.76 } ] }, "STS": { "spearman": [ { "Model": "gtr-t5-xxl", "BIOSSES": 81.91, "SICK-R": 74.29, "STS12": 70.12, "STS13": 82.72, 
"STS14": 78.24, "STS15": 86.26, "STS16": 81.61, "STS17 (en-en)": 85.18, "STS22 (en)": 65.76, "STSBenchmark": 77.73 } ] }, "Summarization": { "spearman": [ { "Model": "gtr-t5-xxl", "SummEval": 30.64 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "gtr-t5-xxl" } ] } }, "norbert3-large": { "BitextMining": { "f1": [ { "Model": "norbert3-large", "BornholmBitextMining": 2.9 } ] }, "Classification": { "accuracy": [ { "Model": "norbert3-large", "AngryTweetsClassification": 49.04, "DKHateClassification": 62.71, "DanishPoliticalCommentsClassification": 33.53, "LccSentimentClassification": 46.93, "MassiveIntentClassification (da)": 45.98, "MassiveIntentClassification (nb)": 47.42, "MassiveIntentClassification (sv)": 48.47, "MassiveScenarioClassification (da)": 50.51, "MassiveScenarioClassification (nb)": 54.25, "MassiveScenarioClassification (sv)": 50.6, "NoRecClassification": 50.46, "NordicLangClassification": 84.25, "NorwegianParliament": 58.85, "ScalaDaClassification": 60.72, "ScalaNbClassification": 66.79 } ] }, "Clustering": { "v_measure": [ { "Model": "norbert3-large" } ] }, "PairClassification": { "ap": [ { "Model": "norbert3-large" } ] }, "Reranking": { "map": [ { "Model": "norbert3-large" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "norbert3-large" } ] }, "STS": { "spearman": [ { "Model": "norbert3-large" } ] }, "Summarization": { "spearman": [ { "Model": "norbert3-large" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "norbert3-large" } ] } }, "sentence-bert-swedish-cased": { "BitextMining": { "f1": [ { "Model": "sentence-bert-swedish-cased", "BornholmBitextMining": 14.08 } ] }, "Classification": { "accuracy": [ { "Model": "sentence-bert-swedish-cased", "AngryTweetsClassification": 44.46, "DKHateClassification": 59.36, "DanishPoliticalCommentsClassification": 28.32, "LccSentimentClassification": 47.2, "MassiveIntentClassification (da)": 42.84, "MassiveIntentClassification (nb)": 42.74, "MassiveIntentClassification (sv)": 69.11, "MassiveScenarioClassification (da)": 49.64, "MassiveScenarioClassification (nb)": 49.49, "MassiveScenarioClassification (sv)": 75.96, "NoRecClassification": 43.53, "NordicLangClassification": 51.45, "NorwegianParliament": 55.74, "ScalaDaClassification": 50.12, "ScalaNbClassification": 50.34 } ] }, "Clustering": { "v_measure": [ { "Model": "sentence-bert-swedish-cased" } ] }, "PairClassification": { "ap": [ { "Model": "sentence-bert-swedish-cased" } ] }, "Reranking": { "map": [ { "Model": "sentence-bert-swedish-cased" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "sentence-bert-swedish-cased" } ] }, "STS": { "spearman": [ { "Model": "sentence-bert-swedish-cased" } ] }, "Summarization": { "spearman": [ { "Model": "sentence-bert-swedish-cased" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "sentence-bert-swedish-cased" } ] } }, "universal-sentence-encoder-multilingual-3": { "BitextMining": { "f1": [ { "Model": "universal-sentence-encoder-multilingual-3" } ] }, "Classification": { "accuracy": [ { "Model": "universal-sentence-encoder-multilingual-3", "AmazonReviewsClassification (fr)": 33.51, "MTOPDomainClassification (fr)": 85.5, "MTOPIntentClassification (fr)": 53.98, "MasakhaNEWSClassification (fra)": 82.06, "MassiveIntentClassification (fr)": 61.19, "MassiveScenarioClassification (fr)": 70.22 } ] }, "Clustering": { "v_measure": [ { "Model": "universal-sentence-encoder-multilingual-3", "AlloProfClusteringP2P": 56.9, "AlloProfClusteringS2S": 37.84, "HALClusteringS2S": 18.95, "MLSUMClusteringP2P": 43.9, "MLSUMClusteringS2S": 35.5, 
"MasakhaNEWSClusteringP2P (fra)": 60.57, "MasakhaNEWSClusteringS2S (fra)": 40.31 } ] }, "PairClassification": { "ap": [ { "Model": "universal-sentence-encoder-multilingual-3", "OpusparcusPC (fr)": 91.46, "PawsXPairClassification (fr)": 52.39 } ] }, "Reranking": { "map": [ { "Model": "universal-sentence-encoder-multilingual-3", "AlloprofReranking": 56.23, "SyntecReranking": 73.85 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "universal-sentence-encoder-multilingual-3", "AlloprofRetrieval": 35.27, "BSARDRetrieval": 0.0, "MintakaRetrieval (fr)": 26.12, "SyntecRetrieval": 69.82, "XPQARetrieval (fr)": 59.59 } ] }, "STS": { "spearman": [ { "Model": "universal-sentence-encoder-multilingual-3", "SICKFr": 71.37, "STS22 (fr)": 77.91, "STSBenchmarkMultilingualSTS (fr)": 75.48 } ] }, "Summarization": { "spearman": [ { "Model": "universal-sentence-encoder-multilingual-3", "SummEvalFr": 28.21 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "universal-sentence-encoder-multilingual-3" } ] } }, "text-similarity-ada-001": { "BitextMining": { "f1": [ { "Model": "text-similarity-ada-001" } ] }, "Classification": { "accuracy": [ { "Model": "text-similarity-ada-001", "AmazonCounterfactualClassification (en)": 76.4, "AmazonPolarityClassification": 92.83, "AmazonReviewsClassification (en)": 47.45, "Banking77Classification": 68.04, "EmotionClassification": 50.33, "ImdbClassification": 89.38, "MTOPDomainClassification (en)": 89.89, "MTOPIntentClassification (en)": 64.8, "MassiveIntentClassification (en)": 65.17, "MassiveScenarioClassification (en)": 67.67, "ToxicConversationsClassification": 70.0, "TweetSentimentExtractionClassification": 63.35 } ] }, "Clustering": { "v_measure": [ { "Model": "text-similarity-ada-001", "ArxivClusteringP2P": 41.49, "ArxivClusteringS2S": 28.47, "BiorxivClusteringP2P": 36.86, "BiorxivClusteringS2S": 27.55, "MedrxivClusteringP2P": 31.09, "MedrxivClusteringS2S": 26.5, "RedditClustering": 42.47, "RedditClusteringP2P": 58.1, "StackExchangeClustering": 53.52, "StackExchangeClusteringP2P": 30.43, "TwentyNewsgroupsClustering": 36.26 } ] }, "PairClassification": { "ap": [ { "Model": "text-similarity-ada-001", "SprintDuplicateQuestions": 77.85, "TwitterSemEval2015": 69.04, "TwitterURLCorpus": 83.69 } ] }, "Reranking": { "map": [ { "Model": "text-similarity-ada-001", "AskUbuntuDupQuestions": 53.49, "MindSmallReranking": 30.71, "SciDocsRR": 71.04, "StackOverflowDupQuestions": 40.85 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "text-similarity-ada-001", "ArguAna": 39.65, "CQADupstackRetrieval": 10.17, "ClimateFEVER": 2.83, "DBPedia": 3.48, "FEVER": 4.45, "FiQA2018": 7.54, "HotpotQA": 12.6, "MSMARCO": 10.53, "NFCorpus": 20.59, "NQ": 2.02, "QuoraRetrieval": 82.18, "SCIDOCS": 6.28, "SciFact": 45.46, "TRECCOVID": 24.56, "Touche2020": 3.1 } ] }, "STS": { "spearman": [ { "Model": "text-similarity-ada-001", "BIOSSES": 78.04, "SICK-R": 77.48, "STS12": 72.3, "STS13": 81.49, "STS14": 74.74, "STS15": 84.28, "STS16": 82.06, "STS17 (en-en)": 87.08, "STS22 (en)": 64.71, "STSBenchmark": 83.78 } ] }, "Summarization": { "spearman": [ { "Model": "text-similarity-ada-001", "SummEval": 26.94 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "text-similarity-ada-001" } ] } }, "rubert-base-cased-sentence": { "BitextMining": { "f1": [ { "Model": "rubert-base-cased-sentence", "Tatoeba (rus-Cyrl_eng-Latn)": 20.26 } ] }, "Classification": { "accuracy": [ { "Model": "rubert-base-cased-sentence", "GeoreviewClassification (rus-Cyrl)": 38.05, "HeadlineClassification (rus-Cyrl)": 67.64, 
"InappropriatenessClassification (rus-Cyrl)": 58.27, "KinopoiskClassification (rus-Cyrl)": 45.86, "MassiveIntentClassification (rus-Cyrl)": 49.1, "MassiveScenarioClassification (rus-Cyrl)": 51.91, "RuReviewsClassification (rus-Cyrl)": 58.34, "RuSciBenchGRNTIClassification (rus-Cyrl)": 52.18, "RuSciBenchOECDClassification (rus-Cyrl)": 40.11 } ] }, "Clustering": { "v_measure": [ { "Model": "rubert-base-cased-sentence", "GeoreviewClusteringP2P (rus-Cyrl)": 41.82, "MLSUMClusteringP2P (rus-Cyrl)": 43.71, "MLSUMClusteringS2S (rus-Cyrl)": 45.94, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 46.29, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.28 } ] }, "PairClassification": { "ap": [ { "Model": "rubert-base-cased-sentence", "OpusparcusPC (rus-Cyrl)": 81.52, "TERRa (rus-Cyrl)": 59.12 } ] }, "Reranking": { "map": [ { "Model": "rubert-base-cased-sentence", "RuBQReranking (rus-Cyrl)": 39.89 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "rubert-base-cased-sentence", "RiaNewsRetrieval (rus-Cyrl)": 6.72, "RuBQRetrieval (rus-Cyrl)": 12.63 } ] }, "STS": { "spearman": [ { "Model": "rubert-base-cased-sentence", "RUParaPhraserSTS (rus-Cyrl)": 66.24, "RuSTSBenchmarkSTS (rus-Cyrl)": 66.03, "STS22 (rus-Cyrl)": 51.27, "STSBenchmarkMultilingualSTS (rus-Cyrl)": 66.71 } ] }, "Summarization": { "spearman": [ { "Model": "rubert-base-cased-sentence" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "rubert-base-cased-sentence" } ] } }, "gelectra-large": { "BitextMining": { "f1": [ { "Model": "gelectra-large" } ] }, "Classification": { "accuracy": [ { "Model": "gelectra-large" } ] }, "Clustering": { "v_measure": [ { "Model": "gelectra-large", "BlurbsClusteringP2P": 13.96, "BlurbsClusteringS2S": 7.57, "TenKGnadClusteringP2P": 11.49, "TenKGnadClusteringS2S": 3.91 } ] }, "PairClassification": { "ap": [ { "Model": "gelectra-large" } ] }, "Reranking": { "map": [ { "Model": "gelectra-large" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "gelectra-large" } ] }, "STS": { "spearman": [ { "Model": "gelectra-large" } ] }, "Summarization": { "spearman": [ { "Model": "gelectra-large" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "gelectra-large" } ] } }, "gtr-t5-base": { "BitextMining": { "f1": [ { "Model": "gtr-t5-base" } ] }, "Classification": { "accuracy": [ { "Model": "gtr-t5-base", "AmazonCounterfactualClassification (en)": 69.33, "AmazonPolarityClassification": 67.82, "AmazonReviewsClassification (en)": 38.48, "Banking77Classification": 79.26, "EmotionClassification": 42.2, "ImdbClassification": 65.99, "MTOPDomainClassification (en)": 92.42, "MTOPIntentClassification (en)": 62.44, "MassiveIntentClassification (en)": 67.05, "MassiveScenarioClassification (en)": 75.4, "ToxicConversationsClassification": 66.6, "TweetSentimentExtractionClassification": 56.02 } ] }, "Clustering": { "v_measure": [ { "Model": "gtr-t5-base", "ArxivClusteringP2P": 35.49, "ArxivClusteringS2S": 27.18, "BiorxivClusteringP2P": 27.66, "BiorxivClusteringS2S": 23.25, "MedrxivClusteringP2P": 27.57, "MedrxivClusteringS2S": 25.13, "RedditClustering": 56.13, "RedditClusteringP2P": 58.53, "StackExchangeClustering": 64.21, "StackExchangeClusteringP2P": 33.01, "TwentyNewsgroupsClustering": 46.72 } ] }, "PairClassification": { "ap": [ { "Model": "gtr-t5-base", "SprintDuplicateQuestions": 94.55, "TwitterSemEval2015": 72.23, "TwitterURLCorpus": 84.77 } ] }, "Reranking": { "map": [ { "Model": "gtr-t5-base", "AskUbuntuDupQuestions": 60.86, "MindSmallReranking": 31.33, "SciDocsRR": 73.71, "StackOverflowDupQuestions": 51.01 } ] }, "Retrieval": { 
"ndcg_at_10": [ { "Model": "gtr-t5-base", "ArguAna": 50.83, "CQADupstackRetrieval": 34.55, "ClimateFEVER": 24.88, "DBPedia": 35.24, "FEVER": 68.93, "FiQA2018": 35.15, "HotpotQA": 54.93, "MSMARCO": 41.16, "NFCorpus": 30.22, "NQ": 50.47, "QuoraRetrieval": 87.98, "SCIDOCS": 14.0, "SciFact": 59.74, "TRECCOVID": 56.05, "Touche2020": 25.89 } ] }, "STS": { "spearman": [ { "Model": "gtr-t5-base", "BIOSSES": 79.0, "SICK-R": 71.45, "STS12": 68.59, "STS13": 79.09, "STS14": 74.64, "STS15": 84.85, "STS16": 81.57, "STS17 (en-en)": 85.8, "STS22 (en)": 66.17, "STSBenchmark": 79.58 } ] }, "Summarization": { "spearman": [ { "Model": "gtr-t5-base", "SummEval": 29.67 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "gtr-t5-base" } ] } }, "bge-base-en-v1.5-instruct": { "BitextMining": { "f1": [ { "Model": "bge-base-en-v1.5-instruct" } ] }, "Classification": { "accuracy": [ { "Model": "bge-base-en-v1.5-instruct" } ] }, "Clustering": { "v_measure": [ { "Model": "bge-base-en-v1.5-instruct" } ] }, "PairClassification": { "ap": [ { "Model": "bge-base-en-v1.5-instruct" } ] }, "Reranking": { "map": [ { "Model": "bge-base-en-v1.5-instruct" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "bge-base-en-v1.5-instruct", "ARCChallenge": 8.85, "AlphaNLI": 4.13, "HellaSwag": 24.03, "PIQA": 23.03, "Quail": 1.25, "RARbCode": 46.32, "RARbMath": 45.62, "SIQA": 0.24, "SpartQA": 2.67, "TempReasonL1": 0.8, "TempReasonL2Fact": 16.56, "TempReasonL2Pure": 1.33, "TempReasonL3Fact": 12.68, "TempReasonL3Pure": 5.08, "WinoGrande": 10.27 } ] }, "STS": { "spearman": [ { "Model": "bge-base-en-v1.5-instruct" } ] }, "Summarization": { "spearman": [ { "Model": "bge-base-en-v1.5-instruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "bge-base-en-v1.5-instruct" } ] } }, "all-MiniLM-L12-v2": { "BitextMining": { "f1": [ { "Model": "all-MiniLM-L12-v2", "BornholmBitextMining (dan-Latn)": 35.25, "Tatoeba (spa-Latn_eng-Latn)": 11.26, "Tatoeba (bos-Latn_eng-Latn)": 7.05, "Tatoeba (xho-Latn_eng-Latn)": 3.66, "Tatoeba (fry-Latn_eng-Latn)": 14.53, "Tatoeba (tur-Latn_eng-Latn)": 3.69, "Tatoeba (fao-Latn_eng-Latn)": 5.92, "Tatoeba (vie-Latn_eng-Latn)": 5.06, "Tatoeba (ind-Latn_eng-Latn)": 5.3, "Tatoeba (pol-Latn_eng-Latn)": 4.29, "Tatoeba (swe-Latn_eng-Latn)": 7.31, "Tatoeba (ita-Latn_eng-Latn)": 12.57, "Tatoeba (dtp-Latn_eng-Latn)": 3.31, "Tatoeba (ron-Latn_eng-Latn)": 8.77, "Tatoeba (isl-Latn_eng-Latn)": 3.44, "Tatoeba (hrv-Latn_eng-Latn)": 5.68, "Tatoeba (cha-Latn_eng-Latn)": 13.07, "Tatoeba (cor-Latn_eng-Latn)": 2.47, "Tatoeba (cym-Latn_eng-Latn)": 5.13, "Tatoeba (jpn-Jpan_eng-Latn)": 2.18, "Tatoeba (lfn-Latn_eng-Latn)": 7.52, "Tatoeba (hun-Latn_eng-Latn)": 3.93, "Tatoeba (lat-Latn_eng-Latn)": 7.14, "Tatoeba (tgl-Latn_eng-Latn)": 3.34, "Tatoeba (kur-Latn_eng-Latn)": 7.3, "Tatoeba (war-Latn_eng-Latn)": 6.18, "Tatoeba (kab-Latn_eng-Latn)": 0.91, "Tatoeba (kaz-Cyrl_eng-Latn)": 0.82, "Tatoeba (slv-Latn_eng-Latn)": 4.52, "Tatoeba (nds-Latn_eng-Latn)": 11.35, "Tatoeba (pam-Latn_eng-Latn)": 4.73, "Tatoeba (bul-Cyrl_eng-Latn)": 0.23, "Tatoeba (ces-Latn_eng-Latn)": 4.2, "Tatoeba (nno-Latn_eng-Latn)": 7.45, "Tatoeba (ben-Beng_eng-Latn)": 0.02, "Tatoeba (amh-Ethi_eng-Latn)": 0.01, "Tatoeba (lit-Latn_eng-Latn)": 1.56, "Tatoeba (pes-Arab_eng-Latn)": 0.3, "Tatoeba (jav-Latn_eng-Latn)": 3.5, "Tatoeba (mal-Mlym_eng-Latn)": 0.24, "Tatoeba (lvs-Latn_eng-Latn)": 3.45, "Tatoeba (gsw-Latn_eng-Latn)": 9.9, "Tatoeba (fra-Latn_eng-Latn)": 17.53, "Tatoeba (orv-Cyrl_eng-Latn)": 0.15, "Tatoeba (kat-Geor_eng-Latn)": 0.45, "Tatoeba (awa-Deva_eng-Latn)": 0.44, 
"Tatoeba (epo-Latn_eng-Latn)": 8.5, "Tatoeba (mhr-Cyrl_eng-Latn)": 0.0, "Tatoeba (dan-Latn_eng-Latn)": 10.21, "Tatoeba (bel-Cyrl_eng-Latn)": 0.85, "Tatoeba (nld-Latn_eng-Latn)": 12.56, "Tatoeba (mkd-Cyrl_eng-Latn)": 0.01, "Tatoeba (mon-Cyrl_eng-Latn)": 0.06, "Tatoeba (ast-Latn_eng-Latn)": 9.99, "Tatoeba (cat-Latn_eng-Latn)": 11.79, "Tatoeba (oci-Latn_eng-Latn)": 8.72, "Tatoeba (khm-Khmr_eng-Latn)": 0.42, "Tatoeba (urd-Arab_eng-Latn)": 0.0, "Tatoeba (tzl-Latn_eng-Latn)": 6.87, "Tatoeba (arq-Arab_eng-Latn)": 0.28, "Tatoeba (uig-Arab_eng-Latn)": 0.4, "Tatoeba (dsb-Latn_eng-Latn)": 3.06, "Tatoeba (hsb-Latn_eng-Latn)": 2.89, "Tatoeba (kzj-Latn_eng-Latn)": 3.64, "Tatoeba (cbk-Latn_eng-Latn)": 9.76, "Tatoeba (afr-Latn_eng-Latn)": 7.59, "Tatoeba (gle-Latn_eng-Latn)": 3.08, "Tatoeba (csb-Latn_eng-Latn)": 5.21, "Tatoeba (mar-Deva_eng-Latn)": 0.04, "Tatoeba (arz-Arab_eng-Latn)": 0.0, "Tatoeba (tat-Cyrl_eng-Latn)": 0.75, "Tatoeba (hin-Deva_eng-Latn)": 0.0, "Tatoeba (ang-Latn_eng-Latn)": 14.63, "Tatoeba (heb-Hebr_eng-Latn)": 0.3, "Tatoeba (tuk-Latn_eng-Latn)": 2.66, "Tatoeba (ile-Latn_eng-Latn)": 17.43, "Tatoeba (zsm-Latn_eng-Latn)": 5.99, "Tatoeba (kor-Hang_eng-Latn)": 0.9, "Tatoeba (uzb-Latn_eng-Latn)": 2.2, "Tatoeba (fin-Latn_eng-Latn)": 3.65, "Tatoeba (hye-Armn_eng-Latn)": 0.5, "Tatoeba (ukr-Cyrl_eng-Latn)": 0.57, "Tatoeba (swh-Latn_eng-Latn)": 5.82, "Tatoeba (gla-Latn_eng-Latn)": 2.58, "Tatoeba (aze-Latn_eng-Latn)": 1.47, "Tatoeba (ara-Arab_eng-Latn)": 0.43, "Tatoeba (eus-Latn_eng-Latn)": 6.58, "Tatoeba (deu-Latn_eng-Latn)": 13.89, "Tatoeba (por-Latn_eng-Latn)": 11.36, "Tatoeba (ber-Tfng_eng-Latn)": 4.72, "Tatoeba (sqi-Latn_eng-Latn)": 5.86, "Tatoeba (pms-Latn_eng-Latn)": 8.94, "Tatoeba (ina-Latn_eng-Latn)": 25.36, "Tatoeba (ido-Latn_eng-Latn)": 11.08, "Tatoeba (slk-Latn_eng-Latn)": 4.2, "Tatoeba (glg-Latn_eng-Latn)": 12.6, "Tatoeba (nov-Latn_eng-Latn)": 19.45, "Tatoeba (tel-Telu_eng-Latn)": 0.67, "Tatoeba (tam-Taml_eng-Latn)": 0.33, "Tatoeba (bre-Latn_eng-Latn)": 3.68, "Tatoeba (tha-Thai_eng-Latn)": 0.67, "Tatoeba (nob-Latn_eng-Latn)": 8.02, "Tatoeba (est-Latn_eng-Latn)": 2.6, "Tatoeba (wuu-Hans_eng-Latn)": 1.89, "Tatoeba (swg-Latn_eng-Latn)": 11.9, "Tatoeba (max-Deva_eng-Latn)": 8.4, "Tatoeba (srp-Cyrl_eng-Latn)": 2.22, "Tatoeba (yue-Hant_eng-Latn)": 1.89, "Tatoeba (rus-Cyrl_eng-Latn)": 0.07, "Tatoeba (ell-Grek_eng-Latn)": 0.2, "Tatoeba (ceb-Latn_eng-Latn)": 3.95, "Tatoeba (yid-Hebr_eng-Latn)": 0.19, "Tatoeba (cmn-Hans_eng-Latn)": 2.45 } ] }, "Classification": { "accuracy": [ { "Model": "all-MiniLM-L12-v2", "AllegroReviews (pol-Latn)": 23.85, "AmazonCounterfactualClassification (en-ext)": 67.24, "AmazonCounterfactualClassification (en)": 65.28, "AmazonCounterfactualClassification (deu-Latn)": 57.13, "AmazonCounterfactualClassification (jpn-Jpan)": 59.94, "AmazonCounterfactualClassification (de)": 57.1, "AmazonCounterfactualClassification (ja)": 59.91, "AmazonPolarityClassification": 62.98, "AmazonReviewsClassification (en)": 30.79, "AmazonReviewsClassification (deu-Latn)": 25.92, "AmazonReviewsClassification (spa-Latn)": 27.64, "AmazonReviewsClassification (fra-Latn)": 27.53, "AmazonReviewsClassification (jpn-Jpan)": 23.57, "AmazonReviewsClassification (cmn-Hans)": 22.99, "AmazonReviewsClassification (de)": 25.91, "AmazonReviewsClassification (es)": 27.63, "AmazonReviewsClassification (fr)": 27.54, "AmazonReviewsClassification (ja)": 23.57, "AmazonReviewsClassification (zh)": 22.99, "AngryTweetsClassification (dan-Latn)": 42.87, "Banking77Classification": 80.4, "CBD (pol-Latn)": 48.46, 
"DanishPoliticalCommentsClassification (dan-Latn)": 27.07, "EmotionClassification": 41.17, "GeoreviewClassification (rus-Cyrl)": 23.49, "HeadlineClassification (rus-Cyrl)": 28.49, "IFlyTek (cmn-Hans)": 15.31, "ImdbClassification": 59.76, "InappropriatenessClassification (rus-Cyrl)": 50.85, "JDReview (cmn-Hans)": 59.57, "KinopoiskClassification (rus-Cyrl)": 34.17, "LccSentimentClassification (dan-Latn)": 41.93, "MTOPDomainClassification (en)": 91.9, "MTOPDomainClassification (deu-Latn)": 72.04, "MTOPDomainClassification (spa-Latn)": 72.99, "MTOPDomainClassification (fra-Latn)": 75.57, "MTOPDomainClassification (hin-Deva)": 40.4, "MTOPDomainClassification (tha-Thai)": 16.36, "MTOPDomainClassification (de)": 72.04, "MTOPDomainClassification (es)": 72.99, "MTOPDomainClassification (fr)": 75.59, "MTOPDomainClassification (hi)": 40.36, "MTOPDomainClassification (th)": 17.1, "MTOPIntentClassification (en)": 62.84, "MTOPIntentClassification (deu-Latn)": 43.42, "MTOPIntentClassification (spa-Latn)": 41.91, "MTOPIntentClassification (fra-Latn)": 38.96, "MTOPIntentClassification (hin-Deva)": 17.76, "MTOPIntentClassification (tha-Thai)": 6.13, "MTOPIntentClassification (de)": 43.41, "MTOPIntentClassification (es)": 41.88, "MTOPIntentClassification (fr)": 38.94, "MTOPIntentClassification (hi)": 17.75, "MTOPIntentClassification (th)": 5.63, "MasakhaNEWSClassification (amh-Ethi)": 30.64, "MasakhaNEWSClassification (eng)": 76.62, "MasakhaNEWSClassification (fra-Latn)": 67.18, "MasakhaNEWSClassification (hau-Latn)": 52.59, "MasakhaNEWSClassification (ibo-Latn)": 54.26, "MasakhaNEWSClassification (lin-Latn)": 62.23, "MasakhaNEWSClassification (lug-Latn)": 47.62, "MasakhaNEWSClassification (orm-Ethi)": 47.17, "MasakhaNEWSClassification (pcm-Latn)": 91.77, "MasakhaNEWSClassification (run-Latn)": 54.47, "MasakhaNEWSClassification (sna-Latn)": 66.53, "MasakhaNEWSClassification (som-Latn)": 40.27, "MasakhaNEWSClassification (swa-Latn)": 47.77, "MasakhaNEWSClassification (tir-Ethi)": 21.18, "MasakhaNEWSClassification (xho-Latn)": 54.34, "MasakhaNEWSClassification (yor-Latn)": 58.61, "MasakhaNEWSClassification (fra)": 72.2, "MassiveIntentClassification (jpn-Jpan)": 30.89, "MassiveIntentClassification (khm-Khmr)": 4.99, "MassiveIntentClassification (slv-Latn)": 38.48, "MassiveIntentClassification (hye-Armn)": 8.69, "MassiveIntentClassification (ita-Latn)": 43.16, "MassiveIntentClassification (fin-Latn)": 39.19, "MassiveIntentClassification (afr-Latn)": 38.84, "MassiveIntentClassification (kor-Kore)": 19.97, "MassiveIntentClassification (ben-Beng)": 13.7, "MassiveIntentClassification (heb-Hebr)": 23.71, "MassiveIntentClassification (dan-Latn)": 44.35, "MassiveIntentClassification (fra-Latn)": 44.75, "MassiveIntentClassification (pol-Latn)": 37.59, "MassiveIntentClassification (por-Latn)": 45.08, "MassiveIntentClassification (tha-Thai)": 10.46, "MassiveIntentClassification (nob-Latn)": 41.79, "MassiveIntentClassification (kat-Geor)": 9.17, "MassiveIntentClassification (tgl-Latn)": 38.63, "MassiveIntentClassification (swe-Latn)": 40.33, "MassiveIntentClassification (hun-Latn)": 37.95, "MassiveIntentClassification (cmo-Hant)": 22.38, "MassiveIntentClassification (hin-Deva)": 18.0, "MassiveIntentClassification (tur-Latn)": 35.93, "MassiveIntentClassification (vie-Latn)": 37.35, "MassiveIntentClassification (mal-Mlym)": 2.83, "MassiveIntentClassification (aze-Latn)": 34.3, "MassiveIntentClassification (amh-Ethi)": 2.45, "MassiveIntentClassification (kan-Knda)": 3.07, "MassiveIntentClassification (deu-Latn)": 44.12, 
"MassiveIntentClassification (rus-Cyrl)": 26.29, "MassiveIntentClassification (ara-Arab)": 21.02, "MassiveIntentClassification (msa-Latn)": 36.16, "MassiveIntentClassification (nld-Latn)": 41.77, "MassiveIntentClassification (fas-Arab)": 23.56, "MassiveIntentClassification (isl-Latn)": 35.17, "MassiveIntentClassification (cym-Latn)": 35.65, "MassiveIntentClassification (cmo-Hans)": 23.74, "MassiveIntentClassification (ell-Grek)": 28.68, "MassiveIntentClassification (spa-Latn)": 40.82, "MassiveIntentClassification (ind-Latn)": 39.65, "MassiveIntentClassification (jav-Latn)": 36.67, "MassiveIntentClassification (mon-Cyrl)": 23.27, "MassiveIntentClassification (mya-Mymr)": 4.36, "MassiveIntentClassification (sqi-Latn)": 41.47, "MassiveIntentClassification (tel-Telu)": 2.54, "MassiveIntentClassification (en)": 67.15, "MassiveIntentClassification (ron-Latn)": 41.64, "MassiveIntentClassification (tam-Taml)": 13.12, "MassiveIntentClassification (swa-Latn)": 35.26, "MassiveIntentClassification (urd-Arab)": 16.26, "MassiveIntentClassification (lav-Latn)": 38.54, "MassiveIntentClassification (af)": 38.94, "MassiveIntentClassification (am)": 2.45, "MassiveIntentClassification (ar)": 20.94, "MassiveIntentClassification (az)": 34.25, "MassiveIntentClassification (bn)": 13.67, "MassiveIntentClassification (cy)": 35.71, "MassiveIntentClassification (da)": 44.43, "MassiveIntentClassification (de)": 44.17, "MassiveIntentClassification (el)": 28.7, "MassiveIntentClassification (es)": 40.91, "MassiveIntentClassification (fa)": 23.52, "MassiveIntentClassification (fi)": 39.27, "MassiveIntentClassification (fr)": 44.82, "MassiveIntentClassification (he)": 23.65, "MassiveIntentClassification (hi)": 17.98, "MassiveIntentClassification (hu)": 38.0, "MassiveIntentClassification (hy)": 8.69, "MassiveIntentClassification (id)": 39.66, "MassiveIntentClassification (is)": 35.14, "MassiveIntentClassification (it)": 43.17, "MassiveIntentClassification (ja)": 30.94, "MassiveIntentClassification (jv)": 36.69, "MassiveIntentClassification (ka)": 9.17, "MassiveIntentClassification (km)": 4.99, "MassiveIntentClassification (kn)": 3.08, "MassiveIntentClassification (ko)": 19.97, "MassiveIntentClassification (lv)": 38.61, "MassiveIntentClassification (ml)": 2.85, "MassiveIntentClassification (mn)": 23.25, "MassiveIntentClassification (ms)": 36.21, "MassiveIntentClassification (my)": 4.38, "MassiveIntentClassification (nb)": 41.91, "MassiveIntentClassification (nl)": 41.85, "MassiveIntentClassification (pl)": 37.63, "MassiveIntentClassification (pt)": 45.12, "MassiveIntentClassification (ro)": 41.71, "MassiveIntentClassification (ru)": 26.33, "MassiveIntentClassification (sl)": 38.52, "MassiveIntentClassification (sq)": 41.62, "MassiveIntentClassification (sv)": 40.42, "MassiveIntentClassification (sw)": 35.28, "MassiveIntentClassification (ta)": 13.1, "MassiveIntentClassification (te)": 2.56, "MassiveIntentClassification (th)": 10.54, "MassiveIntentClassification (tl)": 38.56, "MassiveIntentClassification (tr)": 35.9, "MassiveIntentClassification (ur)": 16.18, "MassiveIntentClassification (vi)": 37.38, "MassiveIntentClassification (zh-CN)": 23.74, "MassiveIntentClassification (zh-TW)": 22.39, "MassiveScenarioClassification (jav-Latn)": 44.54, "MassiveScenarioClassification (aze-Latn)": 39.62, "MassiveScenarioClassification (cmo-Hans)": 33.19, "MassiveScenarioClassification (swa-Latn)": 43.18, "MassiveScenarioClassification (fra-Latn)": 53.77, "MassiveScenarioClassification (mon-Cyrl)": 29.01, "MassiveScenarioClassification 
(kat-Geor)": 14.85, "MassiveScenarioClassification (ben-Beng)": 18.98, "MassiveScenarioClassification (ind-Latn)": 44.37, "MassiveScenarioClassification (kor-Kore)": 25.72, "MassiveScenarioClassification (lav-Latn)": 42.75, "MassiveScenarioClassification (deu-Latn)": 52.08, "MassiveScenarioClassification (hun-Latn)": 44.1, "MassiveScenarioClassification (tam-Taml)": 19.4, "MassiveScenarioClassification (afr-Latn)": 45.72, "MassiveScenarioClassification (nob-Latn)": 47.35, "MassiveScenarioClassification (urd-Arab)": 24.45, "MassiveScenarioClassification (tha-Thai)": 18.32, "MassiveScenarioClassification (ita-Latn)": 51.7, "MassiveScenarioClassification (en)": 74.58, "MassiveScenarioClassification (sqi-Latn)": 49.12, "MassiveScenarioClassification (mya-Mymr)": 10.06, "MassiveScenarioClassification (ara-Arab)": 27.66, "MassiveScenarioClassification (tur-Latn)": 41.8, "MassiveScenarioClassification (khm-Khmr)": 9.75, "MassiveScenarioClassification (cym-Latn)": 41.43, "MassiveScenarioClassification (cmo-Hant)": 31.14, "MassiveScenarioClassification (hye-Armn)": 14.87, "MassiveScenarioClassification (ell-Grek)": 35.55, "MassiveScenarioClassification (ron-Latn)": 49.94, "MassiveScenarioClassification (kan-Knda)": 8.32, "MassiveScenarioClassification (jpn-Jpan)": 36.77, "MassiveScenarioClassification (fin-Latn)": 45.8, "MassiveScenarioClassification (swe-Latn)": 46.81, "MassiveScenarioClassification (dan-Latn)": 49.5, "MassiveScenarioClassification (msa-Latn)": 44.67, "MassiveScenarioClassification (hin-Deva)": 23.03, "MassiveScenarioClassification (tgl-Latn)": 48.29, "MassiveScenarioClassification (pol-Latn)": 44.74, "MassiveScenarioClassification (isl-Latn)": 43.11, "MassiveScenarioClassification (por-Latn)": 53.0, "MassiveScenarioClassification (slv-Latn)": 42.24, "MassiveScenarioClassification (rus-Cyrl)": 28.77, "MassiveScenarioClassification (tel-Telu)": 7.74, "MassiveScenarioClassification (heb-Hebr)": 25.73, "MassiveScenarioClassification (fas-Arab)": 29.0, "MassiveScenarioClassification (vie-Latn)": 40.97, "MassiveScenarioClassification (nld-Latn)": 49.14, "MassiveScenarioClassification (spa-Latn)": 50.73, "MassiveScenarioClassification (mal-Mlym)": 7.25, "MassiveScenarioClassification (amh-Ethi)": 7.41, "MassiveScenarioClassification (af)": 45.71, "MassiveScenarioClassification (am)": 7.41, "MassiveScenarioClassification (ar)": 27.62, "MassiveScenarioClassification (az)": 39.58, "MassiveScenarioClassification (bn)": 18.98, "MassiveScenarioClassification (cy)": 41.4, "MassiveScenarioClassification (da)": 49.47, "MassiveScenarioClassification (de)": 52.07, "MassiveScenarioClassification (el)": 35.51, "MassiveScenarioClassification (es)": 50.74, "MassiveScenarioClassification (fa)": 29.0, "MassiveScenarioClassification (fi)": 45.8, "MassiveScenarioClassification (fr)": 53.76, "MassiveScenarioClassification (he)": 25.68, "MassiveScenarioClassification (hi)": 23.02, "MassiveScenarioClassification (hu)": 44.09, "MassiveScenarioClassification (hy)": 14.83, "MassiveScenarioClassification (id)": 44.35, "MassiveScenarioClassification (is)": 43.08, "MassiveScenarioClassification (it)": 51.71, "MassiveScenarioClassification (ja)": 36.75, "MassiveScenarioClassification (jv)": 44.57, "MassiveScenarioClassification (ka)": 14.84, "MassiveScenarioClassification (km)": 9.75, "MassiveScenarioClassification (kn)": 8.32, "MassiveScenarioClassification (ko)": 25.72, "MassiveScenarioClassification (lv)": 42.75, "MassiveScenarioClassification (ml)": 7.25, "MassiveScenarioClassification (mn)": 29.03, 
"MassiveScenarioClassification (ms)": 44.65, "MassiveScenarioClassification (my)": 10.07, "MassiveScenarioClassification (nb)": 47.36, "MassiveScenarioClassification (nl)": 49.15, "MassiveScenarioClassification (pl)": 44.72, "MassiveScenarioClassification (pt)": 53.0, "MassiveScenarioClassification (ro)": 49.97, "MassiveScenarioClassification (ru)": 28.75, "MassiveScenarioClassification (sl)": 42.26, "MassiveScenarioClassification (sq)": 49.14, "MassiveScenarioClassification (sv)": 46.83, "MassiveScenarioClassification (sw)": 43.18, "MassiveScenarioClassification (ta)": 19.38, "MassiveScenarioClassification (te)": 7.74, "MassiveScenarioClassification (th)": 18.32, "MassiveScenarioClassification (tl)": 48.31, "MassiveScenarioClassification (tr)": 41.79, "MassiveScenarioClassification (ur)": 24.46, "MassiveScenarioClassification (vi)": 40.94, "MassiveScenarioClassification (zh-CN)": 33.18, "MassiveScenarioClassification (zh-TW)": 31.16, "MultilingualSentiment (cmn-Hans)": 40.52, "NoRecClassification (nob-Latn)": 37.73, "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 54.17, "OnlineShopping (cmn-Hans)": 58.65, "PAC (pol-Latn)": 59.53, "PolEmo2.0-IN (pol-Latn)": 38.32, "PolEmo2.0-OUT (pol-Latn)": 22.98, "RuReviewsClassification (rus-Cyrl)": 42.49, "RuSciBenchGRNTIClassification (rus-Cyrl)": 10.49, "RuSciBenchOECDClassification (rus-Cyrl)": 8.31, "TNews (cmn-Hans)": 20.37, "ToxicConversationsClassification": 67.47, "TweetSentimentExtractionClassification": 54.25, "Waimai (cmn-Hans)": 63.48 } ] }, "Clustering": { "v_measure": [ { "Model": "all-MiniLM-L12-v2", "AlloProfClusteringP2P": 46.03, "AlloProfClusteringS2S": 31.83, "ArxivClusteringP2P": 46.07, "ArxivClusteringS2S": 37.5, "BiorxivClusteringP2P": 36.99, "BiorxivClusteringS2S": 33.21, "GeoreviewClusteringP2P (rus-Cyrl)": 20.76, "HALClusteringS2S": 19.58, "MLSUMClusteringP2P": 34.35, "MLSUMClusteringS2S": 29.3, "MasakhaNEWSClusteringP2P (amh-Ethi)": 40.5, "MasakhaNEWSClusteringP2P (eng)": 55.86, "MasakhaNEWSClusteringP2P (fra-Latn)": 42.72, "MasakhaNEWSClusteringP2P (hau-Latn)": 26.61, "MasakhaNEWSClusteringP2P (ibo-Latn)": 44.26, "MasakhaNEWSClusteringP2P (lin-Latn)": 54.52, "MasakhaNEWSClusteringP2P (lug-Latn)": 43.87, "MasakhaNEWSClusteringP2P (orm-Ethi)": 24.87, "MasakhaNEWSClusteringP2P (pcm-Latn)": 74.42, "MasakhaNEWSClusteringP2P (run-Latn)": 51.73, "MasakhaNEWSClusteringP2P (sna-Latn)": 46.89, "MasakhaNEWSClusteringP2P (som-Latn)": 31.17, "MasakhaNEWSClusteringP2P (swa-Latn)": 23.72, "MasakhaNEWSClusteringP2P (tir-Ethi)": 44.08, "MasakhaNEWSClusteringP2P (xho-Latn)": 26.97, "MasakhaNEWSClusteringP2P (yor-Latn)": 32.51, "MasakhaNEWSClusteringP2P (fra)": 42.72, "MasakhaNEWSClusteringS2S (amh-Ethi)": 44.11, "MasakhaNEWSClusteringS2S (eng)": 40.71, "MasakhaNEWSClusteringS2S (fra-Latn)": 32.47, "MasakhaNEWSClusteringS2S (hau-Latn)": 20.63, "MasakhaNEWSClusteringS2S (ibo-Latn)": 35.33, "MasakhaNEWSClusteringS2S (lin-Latn)": 54.52, "MasakhaNEWSClusteringS2S (lug-Latn)": 51.42, "MasakhaNEWSClusteringS2S (orm-Ethi)": 24.84, "MasakhaNEWSClusteringS2S (pcm-Latn)": 70.72, "MasakhaNEWSClusteringS2S (run-Latn)": 50.88, "MasakhaNEWSClusteringS2S (sna-Latn)": 46.6, "MasakhaNEWSClusteringS2S (som-Latn)": 29.87, "MasakhaNEWSClusteringS2S (swa-Latn)": 10.82, "MasakhaNEWSClusteringS2S (tir-Ethi)": 43.63, "MasakhaNEWSClusteringS2S (xho-Latn)": 24.55, "MasakhaNEWSClusteringS2S (yor-Latn)": 32.85, "MasakhaNEWSClusteringS2S (fra)": 32.47, "MedrxivClusteringP2P": 34.25, "MedrxivClusteringS2S": 32.24, "RedditClustering": 
51.18, "RedditClusteringP2P": 54.8, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 10.65, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 10.19, "StackExchangeClustering": 53.05, "StackExchangeClusteringP2P": 33.13, "TwentyNewsgroupsClustering": 47.47 } ] }, "PairClassification": { "ap": [ { "Model": "all-MiniLM-L12-v2", "CDSC-E (pol-Latn)": 49.04, "OpusparcusPC (deu-Latn)": 91.2, "OpusparcusPC (en)": 97.41, "OpusparcusPC (fin-Latn)": 85.99, "OpusparcusPC (fra-Latn)": 87.35, "OpusparcusPC (rus-Cyrl)": 79.23, "OpusparcusPC (swe-Latn)": 84.87, "PSC (pol-Latn)": 87.92, "PawsXPairClassification (deu-Latn)": 50.83, "PawsXPairClassification (en)": 58.62, "PawsXPairClassification (spa-Latn)": 52.08, "PawsXPairClassification (fra-Latn)": 55.54, "PawsXPairClassification (jpn-Hira)": 47.75, "PawsXPairClassification (kor-Hang)": 49.59, "PawsXPairClassification (cmn-Hans)": 52.8, "SICK-E-PL (pol-Latn)": 49.63, "SprintDuplicateQuestions": 92.45, "TERRa (rus-Cyrl)": 46.4, "TwitterSemEval2015": 70.02, "TwitterURLCorpus": 84.77 } ] }, "Reranking": { "map": [ { "Model": "all-MiniLM-L12-v2", "AlloprofReranking (fra-Latn)": 67.01, "AskUbuntuDupQuestions": 64.06, "MMarcoReranking (cmn-Hans)": 5.27, "MindSmallReranking": 31.02, "RuBQReranking (rus-Cyrl)": 38.51, "SciDocsRR": 87.2, "StackOverflowDupQuestions": 51.47, "SyntecReranking (fra-Latn)": 69.17, "T2Reranking (cmn-Hans)": 60.32 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "all-MiniLM-L12-v2", "AILACasedocs": 16.8, "AILAStatutes": 20.71, "ARCChallenge": 10.23, "AlloprofRetrieval (fra-Latn)": 33.2, "AlloprofRetrieval": 33.2, "AlphaNLI": 25.35, "ArguAna": 47.13, "ArguAna-PL (pol-Latn)": 13.4, "BSARDRetrieval (fra-Latn)": 6.24, "CQADupstackRetrieval": 42.53, "ClimateFEVER": 21.57, "CmedqaRetrieval (cmn-Hans)": 2.58, "CovidRetrieval (cmn-Hans)": 10.79, "DBPedia": 33.35, "DuRetrieval (cmn-Hans)": 6.62, "EcomRetrieval (cmn-Hans)": 4.01, "FEVER": 55.9, "FiQA-PL (pol-Latn)": 5.82, "FiQA2018": 37.27, "GerDaLIRSmall (deu-Latn)": 1.35, "HellaSwag": 24.08, "HotpotQA": 44.59, "LEMBNarrativeQARetrieval": 19.64, "LEMBNeedleRetrieval": 12.25, "LEMBPasskeyRetrieval": 14.75, "LEMBQMSumRetrieval": 13.08, "LEMBSummScreenFDRetrieval": 46.98, "LEMBWikimQARetrieval": 44.88, "LeCaRDv2 (zho-Hans)": 18.77, "LegalBenchConsumerContractsQA": 60.21, "LegalBenchCorporateLobbying": 88.69, "LegalQuAD (deu-Latn)": 7.44, "LegalSummarization": 57.43, "MMarcoRetrieval (cmn-Hans)": 7.46, "MSMARCO": 39.03, "MedicalRetrieval (cmn-Hans)": 2.3, "MintakaRetrieval (ara-Arab)": 2.74, "MintakaRetrieval (deu-Latn)": 20.04, "MintakaRetrieval (spa-Latn)": 11.76, "MintakaRetrieval (fra-Latn)": 16.08, "MintakaRetrieval (hin-Deva)": 3.04, "MintakaRetrieval (ita-Latn)": 11.83, "MintakaRetrieval (jpn-Hira)": 7.31, "MintakaRetrieval (por-Latn)": 13.66, "NFCorpus": 32.25, "NFCorpus-PL (pol-Latn)": 15.43, "NQ": 46.47, "PIQA": 26.44, "Quail": 3.08, "QuoraRetrieval": 87.75, "RARbCode": 42.44, "RARbMath": 66.36, "RuBQRetrieval (rus-Cyrl)": 8.84, "SCIDOCS": 21.82, "SCIDOCS-PL (pol-Latn)": 5.34, "SIQA": 2.09, "SciFact": 62.64, "SciFact-PL (pol-Latn)": 22.48, "SpartQA": 2.67, "SyntecRetrieval (fra-Latn)": 60.8, "T2Retrieval (cmn-Hans)": 4.82, "TRECCOVID": 50.82, "TRECCOVID-PL (pol-Latn)": 16.52, "TempReasonL1": 1.66, "TempReasonL2Fact": 10.31, "TempReasonL2Pure": 0.63, "TempReasonL3Fact": 11.11, "TempReasonL3Pure": 6.63, "Touche2020": 17.22, "VideoRetrieval (cmn-Hans)": 9.38, "WinoGrande": 27.2, "XPQARetrieval (ara-Arab_ara-Arab)": 7.83, "XPQARetrieval (eng-Latn_ara-Arab)": 2.52, "XPQARetrieval (ara-Arab_eng-Latn)": 8.88, 
"XPQARetrieval (deu-Latn_deu-Latn)": 56.77, "XPQARetrieval (eng-Latn_deu-Latn)": 18.2, "XPQARetrieval (deu-Latn_eng-Latn)": 30.06, "XPQARetrieval (spa-Latn_spa-Latn)": 42.22, "XPQARetrieval (eng-Latn_spa-Latn)": 7.53, "XPQARetrieval (spa-Latn_eng-Latn)": 26.27, "XPQARetrieval (fra-Latn_fra-Latn)": 55.9, "XPQARetrieval (eng-Latn_fra-Latn)": 14.89, "XPQARetrieval (fra-Latn_eng-Latn)": 34.2, "XPQARetrieval (hin-Deva_hin-Deva)": 33.26, "XPQARetrieval (eng-Latn_hin-Deva)": 6.44, "XPQARetrieval (hin-Deva_eng-Latn)": 6.98, "XPQARetrieval (ita-Latn_ita-Latn)": 58.68, "XPQARetrieval (eng-Latn_ita-Latn)": 8.56, "XPQARetrieval (ita-Latn_eng-Latn)": 28.71, "XPQARetrieval (jpn-Hira_jpn-Hira)": 39.53, "XPQARetrieval (eng-Latn_jpn-Hira)": 5.7, "XPQARetrieval (jpn-Hira_eng-Latn)": 13.75, "XPQARetrieval (kor-Hang_kor-Hang)": 13.48, "XPQARetrieval (eng-Latn_kor-Hang)": 7.43, "XPQARetrieval (kor-Hang_eng-Latn)": 7.34, "XPQARetrieval (pol-Latn_pol-Latn)": 28.07, "XPQARetrieval (eng-Latn_pol-Latn)": 10.03, "XPQARetrieval (pol-Latn_eng-Latn)": 16.58, "XPQARetrieval (por-Latn_por-Latn)": 34.09, "XPQARetrieval (eng-Latn_por-Latn)": 7.38, "XPQARetrieval (por-Latn_eng-Latn)": 22.59, "XPQARetrieval (tam-Taml_tam-Taml)": 9.13, "XPQARetrieval (eng-Latn_tam-Taml)": 4.15, "XPQARetrieval (tam-Taml_eng-Latn)": 3.76, "XPQARetrieval (cmn-Hans_cmn-Hans)": 21.09, "XPQARetrieval (eng-Latn_cmn-Hans)": 6.58, "XPQARetrieval (cmn-Hans_eng-Latn)": 9.39, "XPQARetrieval (fr)": 55.9 } ] }, "STS": { "spearman": [ { "Model": "all-MiniLM-L12-v2", "AFQMC (cmn-Hans)": 7.94, "ATEC (cmn-Hans)": 12.97, "BIOSSES": 83.57, "BQ (cmn-Hans)": 23.31, "CDSC-R (pol-Latn)": 82.5, "LCQMC (cmn-Hans)": 21.04, "PAWSX (cmn-Hans)": 7.31, "RUParaPhraserSTS (rus-Cyrl)": 45.47, "RuSTSBenchmarkSTS (rus-Cyrl)": 56.33, "SICK-R": 79.32, "SICK-R-PL (pol-Latn)": 54.26, "SICKFr (fra-Latn)": 63.16, "STS12": 73.08, "STS13": 82.13, "STS14": 76.73, "STS15": 85.58, "STS16": 80.23, "STS17 (nld-Latn_eng-Latn)": 24.51, "STS17 (eng-Latn_ara-Arab)": 0.54, "STS17 (ara-Arab)": 58.71, "STS17 (kor-Hang)": 43.37, "STS17 (eng-Latn_tur-Latn)": 0.43, "STS17 (ita-Latn_eng-Latn)": 24.28, "STS17 (eng-Latn_deu-Latn)": 27.54, "STS17 (fra-Latn_eng-Latn)": 30.7, "STS17 (spa-Latn)": 78.37, "STS17 (en-en)": 88.63, "STS17 (spa-Latn_eng-Latn)": 22.01, "STS17 (ar-ar)": 58.71, "STS17 (en-ar)": 0.54, "STS17 (en-de)": 27.54, "STS17 (en-tr)": 0.43, "STS17 (es-en)": 22.01, "STS17 (es-es)": 78.37, "STS17 (fr-en)": 30.7, "STS17 (it-en)": 24.28, "STS17 (ko-ko)": 43.37, "STS17 (nl-en)": 24.51, "STS22 (ara-Arab)": 17.54, "STS22 (cmn-Hans)": 33.15, "STS22 (fra-Latn)": 69.51, "STS22 (deu-Latn_eng-Latn)": 42.86, "STS22 (pol-Latn)": 19.22, "STS22 (spa-Latn_eng-Latn)": 53.99, "STS22 (pol-Latn_eng-Latn)": 42.67, "STS22 (tur-Latn)": 21.6, "STS22 (deu-Latn_fra-Latn)": 43.52, "STS22 (fra-Latn_pol-Latn)": 16.9, "STS22 (deu-Latn)": 22.53, "STS22 (deu-Latn_pol-Latn)": 1.63, "STS22 (en)": 65.67, "STS22 (spa-Latn)": 43.98, "STS22 (cmn-Hans_eng-Latn)": 44.39, "STS22 (spa-Latn_ita-Latn)": 40.71, "STS22 (ita-Latn)": 47.48, "STS22 (rus-Cyrl)": 11.19, "STS22 (ar)": 17.54, "STS22 (de)": 22.53, "STS22 (de-en)": 42.86, "STS22 (de-fr)": 43.52, "STS22 (de-pl)": 1.63, "STS22 (es)": 43.98, "STS22 (es-en)": 53.99, "STS22 (es-it)": 40.71, "STS22 (fr)": 69.51, "STS22 (fr-pl)": 16.9, "STS22 (it)": 47.48, "STS22 (pl)": 19.22, "STS22 (pl-en)": 42.67, "STS22 (ru)": 11.19, "STS22 (tr)": 21.6, "STS22 (zh)": 33.15, "STS22 (zh-en)": 44.39, "STSB (cmn-Hans)": 36.66, "STSBenchmark": 83.09, "STSBenchmarkMultilingualSTS (nld-Latn)": 60.03, 
"STSBenchmarkMultilingualSTS (spa-Latn)": 65.33, "STSBenchmarkMultilingualSTS (ita-Latn)": 60.71, "STSBenchmarkMultilingualSTS (cmn-Hans)": 38.93, "STSBenchmarkMultilingualSTS (en)": 83.09, "STSBenchmarkMultilingualSTS (por-Latn)": 63.85, "STSBenchmarkMultilingualSTS (rus-Cyrl)": 56.09, "STSBenchmarkMultilingualSTS (fra-Latn)": 66.68, "STSBenchmarkMultilingualSTS (pol-Latn)": 60.2, "STSBenchmarkMultilingualSTS (deu-Latn)": 63.28 } ] }, "Summarization": { "spearman": [ { "Model": "all-MiniLM-L12-v2", "SummEval": 27.9, "SummEvalFr (fra-Latn)": 26.63 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "all-MiniLM-L12-v2" } ] } }, "rubert-tiny2": { "BitextMining": { "f1": [ { "Model": "rubert-tiny2" } ] }, "Classification": { "accuracy": [ { "Model": "rubert-tiny2", "GeoreviewClassification (rus-Cyrl)": 39.64, "HeadlineClassification (rus-Cyrl)": 74.19, "InappropriatenessClassification (rus-Cyrl)": 58.57, "KinopoiskClassification (rus-Cyrl)": 49.06, "MassiveIntentClassification (rus-Cyrl)": 50.83, "MassiveScenarioClassification (rus-Cyrl)": 59.15, "RuReviewsClassification (rus-Cyrl)": 56.99, "RuSciBenchGRNTIClassification (rus-Cyrl)": 45.63, "RuSciBenchOECDClassification (rus-Cyrl)": 35.48 } ] }, "Clustering": { "v_measure": [ { "Model": "rubert-tiny2", "GeoreviewClusteringP2P (rus-Cyrl)": 44.18, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 41.41, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 38.09 } ] }, "PairClassification": { "ap": [ { "Model": "rubert-tiny2", "TERRa (rus-Cyrl)": 51.87 } ] }, "Reranking": { "map": [ { "Model": "rubert-tiny2", "RuBQReranking (rus-Cyrl)": 46.09 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "rubert-tiny2", "RiaNewsRetrieval (rus-Cyrl)": 13.92, "RuBQRetrieval (rus-Cyrl)": 10.87 } ] }, "STS": { "spearman": [ { "Model": "rubert-tiny2", "RUParaPhraserSTS (rus-Cyrl)": 65.14, "RuSTSBenchmarkSTS (rus-Cyrl)": 69.43, "STS22 (rus-Cyrl)": 50.23 } ] }, "Summarization": { "spearman": [ { "Model": "rubert-tiny2" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "rubert-tiny2" } ] } }, "electra-small-swedish-cased-discriminator": { "BitextMining": { "f1": [ { "Model": "electra-small-swedish-cased-discriminator", "BornholmBitextMining": 0.85 } ] }, "Classification": { "accuracy": [ { "Model": "electra-small-swedish-cased-discriminator", "AngryTweetsClassification": 40.52, "DKHateClassification": 52.28, "DanishPoliticalCommentsClassification": 25.17, "LccSentimentClassification": 36.67, "MassiveIntentClassification (da)": 6.51, "MassiveIntentClassification (nb)": 5.66, "MassiveIntentClassification (sv)": 6.6, "MassiveScenarioClassification (da)": 11.5, "MassiveScenarioClassification (nb)": 11.26, "MassiveScenarioClassification (sv)": 12.16, "NoRecClassification": 39.72, "NordicLangClassification": 44.53, "NorwegianParliament": 52.44, "ScalaDaClassification": 51.66, "ScalaNbClassification": 52.41 } ] }, "Clustering": { "v_measure": [ { "Model": "electra-small-swedish-cased-discriminator" } ] }, "PairClassification": { "ap": [ { "Model": "electra-small-swedish-cased-discriminator" } ] }, "Reranking": { "map": [ { "Model": "electra-small-swedish-cased-discriminator" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "electra-small-swedish-cased-discriminator" } ] }, "STS": { "spearman": [ { "Model": "electra-small-swedish-cased-discriminator" } ] }, "Summarization": { "spearman": [ { "Model": "electra-small-swedish-cased-discriminator" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "electra-small-swedish-cased-discriminator" } ] } }, "e5-base-4k": { "BitextMining": { 
"f1": [ { "Model": "e5-base-4k" } ] }, "Classification": { "accuracy": [ { "Model": "e5-base-4k" } ] }, "Clustering": { "v_measure": [ { "Model": "e5-base-4k" } ] }, "PairClassification": { "ap": [ { "Model": "e5-base-4k" } ] }, "Reranking": { "map": [ { "Model": "e5-base-4k" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "e5-base-4k", "LEMBNarrativeQARetrieval": 30.35, "LEMBNeedleRetrieval": 41.5, "LEMBPasskeyRetrieval": 67.25, "LEMBQMSumRetrieval": 35.6, "LEMBSummScreenFDRetrieval": 95.23, "LEMBWikimQARetrieval": 69.19 } ] }, "STS": { "spearman": [ { "Model": "e5-base-4k" } ] }, "Summarization": { "spearman": [ { "Model": "e5-base-4k" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "e5-base-4k" } ] } }, "instructor-base": { "BitextMining": { "f1": [ { "Model": "instructor-base" } ] }, "Classification": { "accuracy": [ { "Model": "instructor-base" } ] }, "Clustering": { "v_measure": [ { "Model": "instructor-base" } ] }, "PairClassification": { "ap": [ { "Model": "instructor-base" } ] }, "Reranking": { "map": [ { "Model": "instructor-base" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "instructor-base" } ] }, "STS": { "spearman": [ { "Model": "instructor-base" } ] }, "Summarization": { "spearman": [ { "Model": "instructor-base" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "instructor-base", "Core17InstructionRetrieval": -1.09, "News21InstructionRetrieval": -1.78, "Robust04InstructionRetrieval": -10.42 } ] } }, "distiluse-base-multilingual-cased-v2": { "BitextMining": { "f1": [ { "Model": "distiluse-base-multilingual-cased-v2" } ] }, "Classification": { "accuracy": [ { "Model": "distiluse-base-multilingual-cased-v2", "AllegroReviews": 28.03, "AmazonCounterfactualClassification (de)": 68.14, "AmazonCounterfactualClassification (en)": 71.81, "AmazonCounterfactualClassification (en-ext)": 72.96, "AmazonCounterfactualClassification (ja)": 65.39, "AmazonPolarityClassification": 68.0, "AmazonReviewsClassification (de)": 35.03, "AmazonReviewsClassification (en)": 35.45, "AmazonReviewsClassification (es)": 36.24, "AmazonReviewsClassification (fr)": 35.7, "AmazonReviewsClassification (ja)": 31.08, "AmazonReviewsClassification (zh)": 33.89, "Banking77Classification": 71.48, "CBD": 60.0, "EmotionClassification": 40.04, "ImdbClassification": 61.52, "MTOPDomainClassification (de)": 86.19, "MTOPDomainClassification (en)": 91.59, "MTOPDomainClassification (es)": 87.75, "MTOPDomainClassification (fr)": 84.61, "MTOPDomainClassification (hi)": 76.41, "MTOPDomainClassification (th)": 73.62, "MTOPIntentClassification (de)": 59.21, "MTOPIntentClassification (en)": 66.4, "MTOPIntentClassification (es)": 57.21, "MTOPIntentClassification (fr)": 53.41, "MTOPIntentClassification (hi)": 45.54, "MTOPIntentClassification (th)": 47.73, "MasakhaNEWSClassification (fra)": 76.87, "MassiveIntentClassification (af)": 40.02, "MassiveIntentClassification (am)": 2.35, "MassiveIntentClassification (ar)": 43.14, "MassiveIntentClassification (az)": 25.6, "MassiveIntentClassification (bn)": 4.84, "MassiveIntentClassification (cy)": 15.43, "MassiveIntentClassification (da)": 52.33, "MassiveIntentClassification (de)": 51.57, "MassiveIntentClassification (el)": 49.65, "MassiveIntentClassification (en)": 66.71, "MassiveIntentClassification (es)": 56.57, "MassiveIntentClassification (fa)": 55.36, "MassiveIntentClassification (fi)": 45.72, "MassiveIntentClassification (fr)": 57.02, "MassiveIntentClassification (he)": 46.74, "MassiveIntentClassification (hi)": 48.55, "MassiveIntentClassification (hu)": 50.65, 
"MassiveIntentClassification (hy)": 40.79, "MassiveIntentClassification (id)": 56.0, "MassiveIntentClassification (is)": 16.08, "MassiveIntentClassification (it)": 57.65, "MassiveIntentClassification (ja)": 55.33, "MassiveIntentClassification (jv)": 28.16, "MassiveIntentClassification (ka)": 29.41, "MassiveIntentClassification (km)": 4.79, "MassiveIntentClassification (kn)": 3.37, "MassiveIntentClassification (ko)": 49.97, "MassiveIntentClassification (lv)": 44.31, "MassiveIntentClassification (ml)": 3.24, "MassiveIntentClassification (mn)": 40.37, "MassiveIntentClassification (ms)": 47.97, "MassiveIntentClassification (my)": 38.48, "MassiveIntentClassification (nb)": 46.01, "MassiveIntentClassification (nl)": 58.29, "MassiveIntentClassification (pl)": 53.1, "MassiveIntentClassification (pt)": 58.63, "MassiveIntentClassification (ro)": 50.63, "MassiveIntentClassification (ru)": 57.96, "MassiveIntentClassification (sl)": 50.66, "MassiveIntentClassification (sq)": 50.25, "MassiveIntentClassification (sv)": 52.41, "MassiveIntentClassification (sw)": 19.29, "MassiveIntentClassification (ta)": 3.79, "MassiveIntentClassification (te)": 3.36, "MassiveIntentClassification (th)": 45.28, "MassiveIntentClassification (tl)": 28.44, "MassiveIntentClassification (tr)": 50.47, "MassiveIntentClassification (ur)": 46.03, "MassiveIntentClassification (vi)": 45.25, "MassiveIntentClassification (zh-CN)": 59.22, "MassiveIntentClassification (zh-TW)": 54.96, "MassiveScenarioClassification (af)": 53.67, "MassiveScenarioClassification (am)": 7.72, "MassiveScenarioClassification (ar)": 52.19, "MassiveScenarioClassification (az)": 34.75, "MassiveScenarioClassification (bn)": 10.65, "MassiveScenarioClassification (cy)": 21.24, "MassiveScenarioClassification (da)": 62.55, "MassiveScenarioClassification (de)": 61.4, "MassiveScenarioClassification (el)": 60.68, "MassiveScenarioClassification (en)": 74.0, "MassiveScenarioClassification (es)": 64.61, "MassiveScenarioClassification (fa)": 59.24, "MassiveScenarioClassification (fi)": 54.66, "MassiveScenarioClassification (fr)": 65.2, "MassiveScenarioClassification (he)": 54.74, "MassiveScenarioClassification (hi)": 55.99, "MassiveScenarioClassification (hu)": 61.2, "MassiveScenarioClassification (hy)": 49.63, "MassiveScenarioClassification (id)": 65.25, "MassiveScenarioClassification (is)": 22.6, "MassiveScenarioClassification (it)": 64.63, "MassiveScenarioClassification (ja)": 62.32, "MassiveScenarioClassification (jv)": 35.77, "MassiveScenarioClassification (ka)": 39.08, "MassiveScenarioClassification (km)": 9.24, "MassiveScenarioClassification (kn)": 8.28, "MassiveScenarioClassification (ko)": 57.6, "MassiveScenarioClassification (lv)": 51.72, "MassiveScenarioClassification (ml)": 8.25, "MassiveScenarioClassification (mn)": 47.21, "MassiveScenarioClassification (ms)": 55.65, "MassiveScenarioClassification (my)": 43.31, "MassiveScenarioClassification (nb)": 54.98, "MassiveScenarioClassification (nl)": 67.49, "MassiveScenarioClassification (pl)": 61.29, "MassiveScenarioClassification (pt)": 64.26, "MassiveScenarioClassification (ro)": 58.03, "MassiveScenarioClassification (ru)": 65.41, "MassiveScenarioClassification (sl)": 59.36, "MassiveScenarioClassification (sq)": 62.69, "MassiveScenarioClassification (sv)": 64.35, "MassiveScenarioClassification (sw)": 25.12, "MassiveScenarioClassification (ta)": 8.67, "MassiveScenarioClassification (te)": 7.82, "MassiveScenarioClassification (th)": 54.65, "MassiveScenarioClassification (tl)": 36.09, "MassiveScenarioClassification 
(tr)": 60.89, "MassiveScenarioClassification (ur)": 54.71, "MassiveScenarioClassification (vi)": 55.15, "MassiveScenarioClassification (zh-CN)": 66.44, "MassiveScenarioClassification (zh-TW)": 62.89, "PAC": 68.17, "PolEmo2.0-IN": 48.84, "PolEmo2.0-OUT": 30.0, "ToxicConversationsClassification": 69.09, "TweetSentimentExtractionClassification": 59.97 } ] }, "Clustering": { "v_measure": [ { "Model": "distiluse-base-multilingual-cased-v2", "8TagsClustering": 12.51, "AlloProfClusteringP2P": 55.95, "AlloProfClusteringS2S": 35.39, "ArxivClusteringP2P": 33.59, "HALClusteringS2S": 18.2, "MLSUMClusteringP2P": 40.17, "MLSUMClusteringS2S": 34.65, "MasakhaNEWSClusteringP2P (fra)": 53.76, "MasakhaNEWSClusteringS2S (fra)": 32.76 } ] }, "PairClassification": { "ap": [ { "Model": "distiluse-base-multilingual-cased-v2", "CDSC-E": 71.83, "OpusparcusPC (fr)": 92.07, "PPC": 86.83, "PSC": 96.35, "PawsXPairClassification (fr)": 51.08, "SICK-E-PL": 62.05, "SprintDuplicateQuestions": 87.15, "TwitterSemEval2015": 61.67, "TwitterURLCorpus": 84.02 } ] }, "Reranking": { "map": [ { "Model": "distiluse-base-multilingual-cased-v2", "AlloprofReranking": 51.77, "AskUbuntuDupQuestions": 53.75, "MindSmallReranking": 30.39, "SciDocsRR": 69.22, "StackOverflowDupQuestions": 41.92, "SyntecReranking": 74.78 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "distiluse-base-multilingual-cased-v2", "AlloprofRetrieval": 26.99, "ArguAna-PL": 36.7, "BSARDRetrieval": 0.0, "DBPedia-PL": 12.36, "FiQA-PL": 8.02, "HotpotQA-PL": 20.83, "MSMARCO-PL": 4.57, "MintakaRetrieval (fr)": 22.55, "NFCorpus-PL": 16.28, "NQ-PL": 5.85, "Quora-PL": 71.95, "SCIDOCS-PL": 6.5, "SciFact-PL": 33.03, "SyntecRetrieval": 65.34, "TRECCOVID-PL": 16.91, "XPQARetrieval (fr)": 51.2 } ] }, "STS": { "spearman": [ { "Model": "distiluse-base-multilingual-cased-v2", "BIOSSES": 78.34, "CDSC-R": 87.67, "SICK-R": 75.25, "SICK-R-PL": 65.53, "SICKFr": 72.49, "STS12": 72.96, "STS13": 70.58, "STS14": 70.29, "STS15": 81.94, "STS16": 76.8, "STS17 (ar-ar)": 77.34, "STS17 (en-ar)": 77.46, "STS17 (en-de)": 80.24, "STS17 (en-en)": 86.19, "STS17 (en-tr)": 74.34, "STS17 (es-en)": 77.4, "STS17 (es-es)": 83.71, "STS17 (fr-en)": 79.28, "STS17 (it-en)": 80.82, "STS17 (ko-ko)": 76.4, "STS17 (nl-en)": 80.51, "STS22 (ar)": 49.04, "STS22 (de)": 35.73, "STS22 (de-en)": 47.51, "STS22 (de-fr)": 60.76, "STS22 (de-pl)": 36.09, "STS22 (en)": 62.88, "STS22 (es)": 59.34, "STS22 (es-en)": 68.96, "STS22 (es-it)": 63.28, "STS22 (fr)": 76.41, "STS22 (fr-pl)": 61.98, "STS22 (it)": 65.1, "STS22 (pl)": 34.58, "STS22 (pl-en)": 71.33, "STS22 (ru)": 52.4, "STS22 (tr)": 54.07, "STS22 (zh)": 54.32, "STS22 (zh-en)": 61.75, "STSBenchmark": 80.75, "STSBenchmarkMultilingualSTS (fr)": 77.49 } ] }, "Summarization": { "spearman": [ { "Model": "distiluse-base-multilingual-cased-v2", "SummEvalFr": 28.12 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "distiluse-base-multilingual-cased-v2" } ] } }, "all-MiniLM-L6-v2": { "BitextMining": { "f1": [ { "Model": "all-MiniLM-L6-v2", "BornholmBitextMining": 29.68, "BornholmBitextMining (dan-Latn)": 29.68, "Tatoeba (kab-Latn_eng-Latn)": 0.96, "Tatoeba (aze-Latn_eng-Latn)": 1.04, "Tatoeba (wuu-Hans_eng-Latn)": 0.6, "Tatoeba (fra-Latn_eng-Latn)": 8.17, "Tatoeba (nov-Latn_eng-Latn)": 13.97, "Tatoeba (slk-Latn_eng-Latn)": 3.27, "Tatoeba (mkd-Cyrl_eng-Latn)": 0.0, "Tatoeba (ukr-Cyrl_eng-Latn)": 0.3, "Tatoeba (kur-Latn_eng-Latn)": 5.21, "Tatoeba (hin-Deva_eng-Latn)": 0.0, "Tatoeba (tgl-Latn_eng-Latn)": 2.69, "Tatoeba (jav-Latn_eng-Latn)": 3.37, "Tatoeba (nob-Latn_eng-Latn)": 4.34, 
"Tatoeba (tam-Taml_eng-Latn)": 0.33, "Tatoeba (hsb-Latn_eng-Latn)": 2.65, "Tatoeba (srp-Cyrl_eng-Latn)": 1.28, "Tatoeba (cat-Latn_eng-Latn)": 6.93, "Tatoeba (jpn-Jpan_eng-Latn)": 0.97, "Tatoeba (kzj-Latn_eng-Latn)": 2.78, "Tatoeba (uig-Arab_eng-Latn)": 0.2, "Tatoeba (max-Deva_eng-Latn)": 6.93, "Tatoeba (dtp-Latn_eng-Latn)": 1.88, "Tatoeba (cbk-Latn_eng-Latn)": 7.04, "Tatoeba (bre-Latn_eng-Latn)": 3.22, "Tatoeba (arz-Arab_eng-Latn)": 0.0, "Tatoeba (heb-Hebr_eng-Latn)": 0.22, "Tatoeba (kat-Geor_eng-Latn)": 0.3, "Tatoeba (yid-Hebr_eng-Latn)": 0.14, "Tatoeba (lit-Latn_eng-Latn)": 0.92, "Tatoeba (ber-Tfng_eng-Latn)": 4.69, "Tatoeba (hun-Latn_eng-Latn)": 3.56, "Tatoeba (mhr-Cyrl_eng-Latn)": 0.0, "Tatoeba (isl-Latn_eng-Latn)": 2.37, "Tatoeba (ind-Latn_eng-Latn)": 3.86, "Tatoeba (tuk-Latn_eng-Latn)": 3.52, "Tatoeba (kor-Hang_eng-Latn)": 0.45, "Tatoeba (ara-Arab_eng-Latn)": 0.0, "Tatoeba (tzl-Latn_eng-Latn)": 4.58, "Tatoeba (swe-Latn_eng-Latn)": 6.06, "Tatoeba (ang-Latn_eng-Latn)": 15.64, "Tatoeba (mon-Cyrl_eng-Latn)": 0.38, "Tatoeba (urd-Arab_eng-Latn)": 0.1, "Tatoeba (vie-Latn_eng-Latn)": 3.07, "Tatoeba (ina-Latn_eng-Latn)": 17.63, "Tatoeba (hrv-Latn_eng-Latn)": 3.83, "Tatoeba (war-Latn_eng-Latn)": 4.94, "Tatoeba (cor-Latn_eng-Latn)": 2.41, "Tatoeba (tur-Latn_eng-Latn)": 3.59, "Tatoeba (bul-Cyrl_eng-Latn)": 0.21, "Tatoeba (spa-Latn_eng-Latn)": 5.63, "Tatoeba (tel-Telu_eng-Latn)": 0.46, "Tatoeba (nds-Latn_eng-Latn)": 9.56, "Tatoeba (lvs-Latn_eng-Latn)": 2.61, "Tatoeba (amh-Ethi_eng-Latn)": 0.25, "Tatoeba (pms-Latn_eng-Latn)": 7.62, "Tatoeba (xho-Latn_eng-Latn)": 4.01, "Tatoeba (epo-Latn_eng-Latn)": 5.46, "Tatoeba (por-Latn_eng-Latn)": 8.29, "Tatoeba (ile-Latn_eng-Latn)": 13.54, "Tatoeba (ell-Grek_eng-Latn)": 0.1, "Tatoeba (oci-Latn_eng-Latn)": 6.55, "Tatoeba (pes-Arab_eng-Latn)": 0.0, "Tatoeba (tat-Cyrl_eng-Latn)": 0.44, "Tatoeba (awa-Deva_eng-Latn)": 0.51, "Tatoeba (fao-Latn_eng-Latn)": 5.33, "Tatoeba (swg-Latn_eng-Latn)": 8.92, "Tatoeba (uzb-Latn_eng-Latn)": 2.34, "Tatoeba (cym-Latn_eng-Latn)": 6.09, "Tatoeba (mar-Deva_eng-Latn)": 0.0, "Tatoeba (fry-Latn_eng-Latn)": 11.22, "Tatoeba (ces-Latn_eng-Latn)": 3.04, "Tatoeba (afr-Latn_eng-Latn)": 5.89, "Tatoeba (csb-Latn_eng-Latn)": 3.78, "Tatoeba (pol-Latn_eng-Latn)": 2.58, "Tatoeba (gla-Latn_eng-Latn)": 2.7, "Tatoeba (deu-Latn_eng-Latn)": 7.89, "Tatoeba (cmn-Hans_eng-Latn)": 1.92, "Tatoeba (ita-Latn_eng-Latn)": 9.9, "Tatoeba (ben-Beng_eng-Latn)": 0.0, "Tatoeba (glg-Latn_eng-Latn)": 9.31, "Tatoeba (dsb-Latn_eng-Latn)": 2.9, "Tatoeba (pam-Latn_eng-Latn)": 3.54, "Tatoeba (ast-Latn_eng-Latn)": 6.84, "Tatoeba (bos-Latn_eng-Latn)": 5.58, "Tatoeba (nld-Latn_eng-Latn)": 10.16, "Tatoeba (bel-Cyrl_eng-Latn)": 0.5, "Tatoeba (orv-Cyrl_eng-Latn)": 0.0, "Tatoeba (gsw-Latn_eng-Latn)": 11.33, "Tatoeba (dan-Latn_eng-Latn)": 7.84, "Tatoeba (hye-Armn_eng-Latn)": 0.41, "Tatoeba (mal-Mlym_eng-Latn)": 0.15, "Tatoeba (arq-Arab_eng-Latn)": 0.11, "Tatoeba (kaz-Cyrl_eng-Latn)": 0.42, "Tatoeba (khm-Khmr_eng-Latn)": 0.42, "Tatoeba (tha-Thai_eng-Latn)": 0.3, "Tatoeba (swh-Latn_eng-Latn)": 5.8, "Tatoeba (gle-Latn_eng-Latn)": 2.75, "Tatoeba (ceb-Latn_eng-Latn)": 3.39, "Tatoeba (sqi-Latn_eng-Latn)": 3.58, "Tatoeba (slv-Latn_eng-Latn)": 3.25, "Tatoeba (ido-Latn_eng-Latn)": 7.48, "Tatoeba (yue-Hant_eng-Latn)": 0.86, "Tatoeba (nno-Latn_eng-Latn)": 5.38, "Tatoeba (est-Latn_eng-Latn)": 2.36, "Tatoeba (lfn-Latn_eng-Latn)": 4.55, "Tatoeba (lat-Latn_eng-Latn)": 5.04, "Tatoeba (cha-Latn_eng-Latn)": 13.29, "Tatoeba (eus-Latn_eng-Latn)": 5.54, "Tatoeba (fin-Latn_eng-Latn)": 2.79, "Tatoeba 
(rus-Cyrl_eng-Latn)": 0.07, "Tatoeba (ron-Latn_eng-Latn)": 6.82, "Tatoeba (zsm-Latn_eng-Latn)": 4.24 } ] }, "Classification": { "accuracy": [ { "Model": "all-MiniLM-L6-v2", "AllegroReviews (pol-Latn)": 24.64, "AmazonCounterfactualClassification (en)": 63.64, "AmazonCounterfactualClassification (en-ext)": 65.59, "AmazonCounterfactualClassification (deu-Latn)": 57.82, "AmazonCounterfactualClassification (jpn-Jpan)": 60.9, "AmazonPolarityClassification": 64.26, "AmazonReviewsClassification (en)": 30.85, "AmazonReviewsClassification (deu-Latn)": 26.44, "AmazonReviewsClassification (spa-Latn)": 27.35, "AmazonReviewsClassification (fra-Latn)": 26.88, "AmazonReviewsClassification (jpn-Jpan)": 23.78, "AmazonReviewsClassification (cmn-Hans)": 23.67, "AngryTweetsClassification": 42.49, "AngryTweetsClassification (dan-Latn)": 42.48, "Banking77Classification": 80.04, "CBD (pol-Latn)": 50.9, "DKHateClassification": 55.05, "DanishPoliticalCommentsClassification": 26.96, "DanishPoliticalCommentsClassification (dan-Latn)": 26.7, "EmotionClassification": 40.83, "GeoreviewClassification (rus-Cyrl)": 27.08, "HeadlineClassification (rus-Cyrl)": 27.77, "IFlyTek (cmn-Hans)": 16.09, "ImdbClassification": 61.76, "InappropriatenessClassification (rus-Cyrl)": 51.73, "JDReview (cmn-Hans)": 59.98, "KinopoiskClassification (rus-Cyrl)": 33.93, "LccSentimentClassification": 38.47, "LccSentimentClassification (dan-Latn)": 38.53, "MTOPDomainClassification (en)": 91.68, "MTOPDomainClassification (deu-Latn)": 70.47, "MTOPDomainClassification (spa-Latn)": 72.99, "MTOPDomainClassification (fra-Latn)": 75.1, "MTOPDomainClassification (hin-Deva)": 40.74, "MTOPDomainClassification (tha-Thai)": 15.66, "MTOPIntentClassification (en)": 61.55, "MTOPIntentClassification (deu-Latn)": 45.7, "MTOPIntentClassification (spa-Latn)": 44.19, "MTOPIntentClassification (fra-Latn)": 39.67, "MTOPIntentClassification (hin-Deva)": 18.69, "MTOPIntentClassification (tha-Thai)": 5.78, "MasakhaNEWSClassification (fra)": 74.05, "MasakhaNEWSClassification (amh-Ethi)": 33.03, "MasakhaNEWSClassification (eng)": 77.11, "MasakhaNEWSClassification (fra-Latn)": 68.84, "MasakhaNEWSClassification (hau-Latn)": 50.49, "MasakhaNEWSClassification (ibo-Latn)": 52.15, "MasakhaNEWSClassification (lin-Latn)": 68.29, "MasakhaNEWSClassification (lug-Latn)": 47.58, "MasakhaNEWSClassification (orm-Ethi)": 50.68, "MasakhaNEWSClassification (pcm-Latn)": 92.56, "MasakhaNEWSClassification (run-Latn)": 54.81, "MasakhaNEWSClassification (sna-Latn)": 65.58, "MasakhaNEWSClassification (som-Latn)": 39.8, "MasakhaNEWSClassification (swa-Latn)": 47.25, "MasakhaNEWSClassification (tir-Ethi)": 28.97, "MasakhaNEWSClassification (xho-Latn)": 54.14, "MasakhaNEWSClassification (yor-Latn)": 55.01, "MassiveIntentClassification (en)": 66.94, "MassiveIntentClassification (da)": 40.99, "MassiveIntentClassification (nb)": 39.34, "MassiveIntentClassification (sv)": 38.1, "MassiveIntentClassification (aze-Latn)": 30.63, "MassiveIntentClassification (spa-Latn)": 39.88, "MassiveIntentClassification (tam-Taml)": 11.31, "MassiveIntentClassification (swe-Latn)": 38.09, "MassiveIntentClassification (fas-Arab)": 19.1, "MassiveIntentClassification (khm-Khmr)": 4.89, "MassiveIntentClassification (mon-Cyrl)": 20.35, "MassiveIntentClassification (hye-Armn)": 7.62, "MassiveIntentClassification (kan-Knda)": 3.14, "MassiveIntentClassification (cmo-Hans)": 24.4, "MassiveIntentClassification (rus-Cyrl)": 27.58, "MassiveIntentClassification (jpn-Jpan)": 31.87, "MassiveIntentClassification (deu-Latn)": 43.44, 
"MassiveIntentClassification (ind-Latn)": 39.02, "MassiveIntentClassification (cym-Latn)": 34.54, "MassiveIntentClassification (nld-Latn)": 40.2, "MassiveIntentClassification (hin-Deva)": 17.7, "MassiveIntentClassification (afr-Latn)": 37.45, "MassiveIntentClassification (ell-Grek)": 24.19, "MassiveIntentClassification (mal-Mlym)": 2.87, "MassiveIntentClassification (por-Latn)": 43.76, "MassiveIntentClassification (sqi-Latn)": 40.7, "MassiveIntentClassification (urd-Arab)": 14.42, "MassiveIntentClassification (vie-Latn)": 37.09, "MassiveIntentClassification (hun-Latn)": 35.69, "MassiveIntentClassification (ron-Latn)": 40.54, "MassiveIntentClassification (ara-Arab)": 19.05, "MassiveIntentClassification (nob-Latn)": 39.36, "MassiveIntentClassification (slv-Latn)": 36.7, "MassiveIntentClassification (lav-Latn)": 36.97, "MassiveIntentClassification (heb-Hebr)": 22.48, "MassiveIntentClassification (pol-Latn)": 36.07, "MassiveIntentClassification (ita-Latn)": 41.59, "MassiveIntentClassification (msa-Latn)": 35.07, "MassiveIntentClassification (mya-Mymr)": 4.24, "MassiveIntentClassification (isl-Latn)": 29.95, "MassiveIntentClassification (tel-Telu)": 2.46, "MassiveIntentClassification (swa-Latn)": 34.98, "MassiveIntentClassification (amh-Ethi)": 2.62, "MassiveIntentClassification (cmo-Hant)": 22.56, "MassiveIntentClassification (tha-Thai)": 11.26, "MassiveIntentClassification (ben-Beng)": 13.1, "MassiveIntentClassification (fin-Latn)": 38.37, "MassiveIntentClassification (fra-Latn)": 42.55, "MassiveIntentClassification (kor-Kore)": 16.05, "MassiveIntentClassification (kat-Geor)": 9.07, "MassiveIntentClassification (dan-Latn)": 41.0, "MassiveIntentClassification (tur-Latn)": 33.76, "MassiveIntentClassification (tgl-Latn)": 37.92, "MassiveIntentClassification (jav-Latn)": 35.91, "MassiveScenarioClassification (en)": 73.81, "MassiveScenarioClassification (da)": 47.01, "MassiveScenarioClassification (nb)": 44.67, "MassiveScenarioClassification (sv)": 42.93, "MassiveScenarioClassification (mal-Mlym)": 7.67, "MassiveScenarioClassification (khm-Khmr)": 9.25, "MassiveScenarioClassification (deu-Latn)": 51.47, "MassiveScenarioClassification (msa-Latn)": 43.67, "MassiveScenarioClassification (heb-Hebr)": 24.01, "MassiveScenarioClassification (mon-Cyrl)": 25.47, "MassiveScenarioClassification (mya-Mymr)": 10.61, "MassiveScenarioClassification (ind-Latn)": 43.46, "MassiveScenarioClassification (nob-Latn)": 44.67, "MassiveScenarioClassification (fra-Latn)": 51.14, "MassiveScenarioClassification (tgl-Latn)": 45.69, "MassiveScenarioClassification (amh-Ethi)": 7.57, "MassiveScenarioClassification (fas-Arab)": 23.97, "MassiveScenarioClassification (vie-Latn)": 40.47, "MassiveScenarioClassification (sqi-Latn)": 47.21, "MassiveScenarioClassification (dan-Latn)": 47.02, "MassiveScenarioClassification (spa-Latn)": 49.0, "MassiveScenarioClassification (pol-Latn)": 43.82, "MassiveScenarioClassification (tel-Telu)": 7.95, "MassiveScenarioClassification (tha-Thai)": 19.5, "MassiveScenarioClassification (kor-Kore)": 20.3, "MassiveScenarioClassification (cmo-Hans)": 33.65, "MassiveScenarioClassification (urd-Arab)": 23.73, "MassiveScenarioClassification (aze-Latn)": 35.59, "MassiveScenarioClassification (ron-Latn)": 48.23, "MassiveScenarioClassification (jav-Latn)": 43.59, "MassiveScenarioClassification (slv-Latn)": 41.9, "MassiveScenarioClassification (kat-Geor)": 14.92, "MassiveScenarioClassification (lav-Latn)": 40.43, "MassiveScenarioClassification (cym-Latn)": 39.0, "MassiveScenarioClassification (swe-Latn)": 42.95, 
"MassiveScenarioClassification (rus-Cyrl)": 30.46, "MassiveScenarioClassification (ben-Beng)": 20.56, "MassiveScenarioClassification (por-Latn)": 50.72, "MassiveScenarioClassification (hye-Armn)": 13.03, "MassiveScenarioClassification (jpn-Jpan)": 37.3, "MassiveScenarioClassification (nld-Latn)": 48.43, "MassiveScenarioClassification (swa-Latn)": 43.32, "MassiveScenarioClassification (tam-Taml)": 17.37, "MassiveScenarioClassification (isl-Latn)": 36.12, "MassiveScenarioClassification (kan-Knda)": 7.85, "MassiveScenarioClassification (ell-Grek)": 31.3, "MassiveScenarioClassification (tur-Latn)": 38.85, "MassiveScenarioClassification (cmo-Hant)": 31.18, "MassiveScenarioClassification (fin-Latn)": 42.38, "MassiveScenarioClassification (hin-Deva)": 23.71, "MassiveScenarioClassification (ara-Arab)": 25.99, "MassiveScenarioClassification (hun-Latn)": 41.61, "MassiveScenarioClassification (afr-Latn)": 43.87, "MassiveScenarioClassification (ita-Latn)": 49.8, "MultilingualSentiment (cmn-Hans)": 41.28, "NoRecClassification": 40.02, "NoRecClassification (nob-Latn)": 37.93, "NordicLangClassification": 54.71, "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 54.7, "NorwegianParliament": 54.8, "OnlineShopping (cmn-Hans)": 57.74, "PAC (pol-Latn)": 59.78, "PolEmo2.0-IN (pol-Latn)": 40.29, "PolEmo2.0-OUT (pol-Latn)": 25.0, "RuReviewsClassification (rus-Cyrl)": 41.79, "RuSciBenchGRNTIClassification (rus-Cyrl)": 10.08, "RuSciBenchOECDClassification (rus-Cyrl)": 8.3, "ScalaDaClassification": 50.03, "ScalaNbClassification": 50.17, "TNews (cmn-Hans)": 20.12, "ToxicConversationsClassification": 62.09, "TweetSentimentExtractionClassification": 54.04, "Waimai (cmn-Hans)": 62.72 } ] }, "Clustering": { "v_measure": [ { "Model": "all-MiniLM-L6-v2", "AlloProfClusteringP2P": 51.83, "AlloProfClusteringS2S": 32.07, "ArxivClusteringP2P": 46.55, "ArxivClusteringS2S": 37.86, "BiorxivClusteringP2P": 38.37, "BiorxivClusteringS2S": 32.88, "GeoreviewClusteringP2P (rus-Cyrl)": 20.25, "HALClusteringS2S": 18.84, "MLSUMClusteringP2P": 36.74, "MLSUMClusteringP2P (rus-Cyrl)": 23.91, "MLSUMClusteringS2S": 28.12, "MLSUMClusteringS2S (rus-Cyrl)": 19.07, "MasakhaNEWSClusteringP2P (fra)": 34.92, "MasakhaNEWSClusteringP2P (amh-Ethi)": 43.85, "MasakhaNEWSClusteringP2P (eng)": 48.88, "MasakhaNEWSClusteringP2P (fra-Latn)": 34.92, "MasakhaNEWSClusteringP2P (hau-Latn)": 24.77, "MasakhaNEWSClusteringP2P (ibo-Latn)": 45.94, "MasakhaNEWSClusteringP2P (lin-Latn)": 69.56, "MasakhaNEWSClusteringP2P (lug-Latn)": 49.4, "MasakhaNEWSClusteringP2P (orm-Ethi)": 25.34, "MasakhaNEWSClusteringP2P (pcm-Latn)": 85.57, "MasakhaNEWSClusteringP2P (run-Latn)": 50.75, "MasakhaNEWSClusteringP2P (sna-Latn)": 41.68, "MasakhaNEWSClusteringP2P (som-Latn)": 29.02, "MasakhaNEWSClusteringP2P (swa-Latn)": 21.87, "MasakhaNEWSClusteringP2P (tir-Ethi)": 42.93, "MasakhaNEWSClusteringP2P (xho-Latn)": 28.58, "MasakhaNEWSClusteringP2P (yor-Latn)": 31.45, "MasakhaNEWSClusteringS2S (fra)": 40.58, "MasakhaNEWSClusteringS2S (amh-Ethi)": 45.44, "MasakhaNEWSClusteringS2S (eng)": 41.09, "MasakhaNEWSClusteringS2S (fra-Latn)": 40.58, "MasakhaNEWSClusteringS2S (hau-Latn)": 15.42, "MasakhaNEWSClusteringS2S (ibo-Latn)": 37.02, "MasakhaNEWSClusteringS2S (lin-Latn)": 65.14, "MasakhaNEWSClusteringS2S (lug-Latn)": 44.21, "MasakhaNEWSClusteringS2S (orm-Ethi)": 24.79, "MasakhaNEWSClusteringS2S (pcm-Latn)": 61.48, "MasakhaNEWSClusteringS2S (run-Latn)": 51.25, "MasakhaNEWSClusteringS2S (sna-Latn)": 42.74, "MasakhaNEWSClusteringS2S (som-Latn)": 30.08, 
"MasakhaNEWSClusteringS2S (swa-Latn)": 9.55, "MasakhaNEWSClusteringS2S (tir-Ethi)": 46.04, "MasakhaNEWSClusteringS2S (xho-Latn)": 27.08, "MasakhaNEWSClusteringS2S (yor-Latn)": 31.04, "MedrxivClusteringP2P": 34.39, "MedrxivClusteringS2S": 31.86, "RedditClustering": 50.7, "RedditClusteringP2P": 54.8, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 10.21, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 9.43, "StackExchangeClustering": 53.14, "StackExchangeClusteringP2P": 34.26, "TwentyNewsgroupsClustering": 46.49 } ] }, "PairClassification": { "ap": [ { "Model": "all-MiniLM-L6-v2", "CDSC-E (pol-Latn)": 47.27, "OpusparcusPC (fr)": 86.53, "OpusparcusPC (deu-Latn)": 89.91, "OpusparcusPC (en)": 97.46, "OpusparcusPC (fin-Latn)": 85.44, "OpusparcusPC (fra-Latn)": 86.53, "OpusparcusPC (rus-Cyrl)": 79.28, "OpusparcusPC (swe-Latn)": 83.78, "PSC (pol-Latn)": 81.87, "PawsXPairClassification (fr)": 55.4, "PawsXPairClassification (deu-Latn)": 51.22, "PawsXPairClassification (en)": 59.1, "PawsXPairClassification (spa-Latn)": 52.21, "PawsXPairClassification (fra-Latn)": 55.41, "PawsXPairClassification (jpn-Hira)": 48.97, "PawsXPairClassification (kor-Hang)": 50.53, "PawsXPairClassification (cmn-Hans)": 53.11, "SICK-E-PL (pol-Latn)": 47.32, "SprintDuplicateQuestions": 94.55, "TERRa (rus-Cyrl)": 45.03, "TwitterSemEval2015": 67.86, "TwitterURLCorpus": 84.7 } ] }, "Reranking": { "map": [ { "Model": "all-MiniLM-L6-v2", "AlloprofReranking": 31.69, "AlloprofReranking (fra-Latn)": 62.62, "AskUbuntuDupQuestions": 63.48, "MMarcoReranking (cmn-Hans)": 4.74, "MindSmallReranking": 30.8, "RuBQReranking (rus-Cyrl)": 27.05, "SciDocsRR": 87.12, "StackOverflowDupQuestions": 50.76, "SyntecReranking": 59.57, "SyntecReranking (fra-Latn)": 67.31, "T2Reranking (cmn-Hans)": 56.26 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "all-MiniLM-L6-v2", "AILACasedocs": 19.72, "AILAStatutes": 20.52, "ARCChallenge": 9.48, "AlloprofRetrieval": 28.41, "AlloprofRetrieval (fra-Latn)": 28.41, "AlphaNLI": 28.19, "ArguAna": 50.17, "ArguAna-PL (pol-Latn)": 11.5, "BSARDRetrieval": 0.0, "BSARDRetrieval (fra-Latn)": 4.8, "CQADupstackRetrieval": 41.32, "ClimateFEVER": 20.27, "CmedqaRetrieval (cmn-Hans)": 2.03, "CovidRetrieval (cmn-Hans)": 0.8, "DBPedia": 32.33, "DuRetrieval (cmn-Hans)": 3.03, "EcomRetrieval (cmn-Hans)": 3.7, "FEVER": 51.93, "FiQA-PL (pol-Latn)": 2.29, "FiQA2018": 36.87, "GerDaLIRSmall (deu-Latn)": 2.41, "HellaSwag": 24.21, "HotpotQA": 46.51, "LEMBNarrativeQARetrieval": 18.27, "LEMBNeedleRetrieval": 20.0, "LEMBPasskeyRetrieval": 23.25, "LEMBQMSumRetrieval": 16.32, "LEMBSummScreenFDRetrieval": 54.8, "LEMBWikimQARetrieval": 46.23, "LeCaRDv2 (zho-Hans)": 17.5, "LegalBenchConsumerContractsQA": 65.6, "LegalBenchCorporateLobbying": 86.41, "LegalQuAD (deu-Latn)": 11.81, "LegalSummarization": 59.0, "MMarcoRetrieval (cmn-Hans)": 6.21, "MSMARCO": 36.54, "MedicalRetrieval (cmn-Hans)": 1.76, "MintakaRetrieval (fr)": 9.19, "MintakaRetrieval (ara-Arab)": 2.22, "MintakaRetrieval (deu-Latn)": 15.43, "MintakaRetrieval (spa-Latn)": 7.72, "MintakaRetrieval (fra-Latn)": 9.19, "MintakaRetrieval (hin-Deva)": 2.65, "MintakaRetrieval (ita-Latn)": 8.48, "MintakaRetrieval (jpn-Hira)": 6.7, "MintakaRetrieval (por-Latn)": 9.76, "NFCorpus": 31.59, "NFCorpus-PL (pol-Latn)": 10.62, "NQ": 43.87, "PIQA": 25.28, "Quail": 3.92, "QuoraRetrieval": 87.56, "RARbCode": 44.27, "RARbMath": 68.19, "RiaNewsRetrieval (rus-Cyrl)": 0.67, "RuBQRetrieval (rus-Cyrl)": 2.64, "SCIDOCS": 21.64, "SCIDOCS-PL (pol-Latn)": 3.75, "SIQA": 1.56, "SciFact": 64.51, "SciFact-PL (pol-Latn)": 16.14, 
"SpartQA": 1.65, "SyntecRetrieval": 60.15, "SyntecRetrieval (fra-Latn)": 60.15, "T2Retrieval (cmn-Hans)": 1.6, "TRECCOVID": 47.25, "TRECCOVID-PL (pol-Latn)": 8.66, "TempReasonL1": 1.53, "TempReasonL2Fact": 17.65, "TempReasonL2Pure": 0.46, "TempReasonL3Fact": 14.16, "TempReasonL3Pure": 6.33, "Touche2020": 16.9, "VideoRetrieval (cmn-Hans)": 9.79, "WinoGrande": 47.33, "XPQARetrieval (fr)": 51.79, "XPQARetrieval (ara-Arab_ara-Arab)": 8.03, "XPQARetrieval (eng-Latn_ara-Arab)": 1.86, "XPQARetrieval (ara-Arab_eng-Latn)": 6.87, "XPQARetrieval (deu-Latn_deu-Latn)": 53.25, "XPQARetrieval (eng-Latn_deu-Latn)": 10.99, "XPQARetrieval (deu-Latn_eng-Latn)": 27.59, "XPQARetrieval (spa-Latn_spa-Latn)": 38.87, "XPQARetrieval (eng-Latn_spa-Latn)": 5.46, "XPQARetrieval (spa-Latn_eng-Latn)": 22.2, "XPQARetrieval (fra-Latn_fra-Latn)": 51.79, "XPQARetrieval (eng-Latn_fra-Latn)": 8.57, "XPQARetrieval (fra-Latn_eng-Latn)": 31.36, "XPQARetrieval (hin-Deva_hin-Deva)": 35.3, "XPQARetrieval (eng-Latn_hin-Deva)": 6.28, "XPQARetrieval (hin-Deva_eng-Latn)": 6.0, "XPQARetrieval (ita-Latn_ita-Latn)": 54.57, "XPQARetrieval (eng-Latn_ita-Latn)": 6.79, "XPQARetrieval (ita-Latn_eng-Latn)": 24.13, "XPQARetrieval (jpn-Hira_jpn-Hira)": 39.23, "XPQARetrieval (eng-Latn_jpn-Hira)": 4.1, "XPQARetrieval (jpn-Hira_eng-Latn)": 13.05, "XPQARetrieval (kor-Hang_kor-Hang)": 10.24, "XPQARetrieval (eng-Latn_kor-Hang)": 5.72, "XPQARetrieval (kor-Hang_eng-Latn)": 6.37, "XPQARetrieval (pol-Latn_pol-Latn)": 22.33, "XPQARetrieval (eng-Latn_pol-Latn)": 7.58, "XPQARetrieval (pol-Latn_eng-Latn)": 14.43, "XPQARetrieval (por-Latn_por-Latn)": 31.93, "XPQARetrieval (eng-Latn_por-Latn)": 5.9, "XPQARetrieval (por-Latn_eng-Latn)": 20.74, "XPQARetrieval (tam-Taml_tam-Taml)": 7.43, "XPQARetrieval (eng-Latn_tam-Taml)": 3.42, "XPQARetrieval (tam-Taml_eng-Latn)": 2.91, "XPQARetrieval (cmn-Hans_cmn-Hans)": 19.39, "XPQARetrieval (eng-Latn_cmn-Hans)": 5.05, "XPQARetrieval (cmn-Hans_eng-Latn)": 8.77 } ] }, "STS": { "spearman": [ { "Model": "all-MiniLM-L6-v2", "AFQMC (cmn-Hans)": 8.59, "ATEC (cmn-Hans)": 13.52, "BIOSSES": 81.64, "BQ (cmn-Hans)": 23.84, "CDSC-R (pol-Latn)": 79.45, "LCQMC (cmn-Hans)": 23.85, "PAWSX (cmn-Hans)": 7.21, "RUParaPhraserSTS (rus-Cyrl)": 43.93, "RuSTSBenchmarkSTS (rus-Cyrl)": 55.56, "SICK-R": 77.58, "SICK-R-PL (pol-Latn)": 52.43, "SICKFr": 62.48, "SICKFr (fra-Latn)": 62.48, "STS12": 72.37, "STS13": 80.6, "STS14": 75.59, "STS15": 85.39, "STS16": 78.99, "STS17 (ar-ar)": 50.89, "STS17 (en-ar)": -4.28, "STS17 (en-de)": 35.82, "STS17 (en-en)": 87.59, "STS17 (en-tr)": 4.5, "STS17 (es-en)": 16.31, "STS17 (es-es)": 76.12, "STS17 (fr-en)": 37.09, "STS17 (it-en)": 24.45, "STS17 (ko-ko)": 43.39, "STS17 (nl-en)": 29.0, "STS17 (ara-Arab)": 50.89, "STS17 (spa-Latn_eng-Latn)": 16.31, "STS17 (kor-Hang)": 43.39, "STS17 (eng-Latn_tur-Latn)": 4.5, "STS17 (fra-Latn_eng-Latn)": 37.09, "STS17 (nld-Latn_eng-Latn)": 29.0, "STS17 (eng-Latn_ara-Arab)": -4.28, "STS17 (spa-Latn)": 76.12, "STS17 (eng-Latn_deu-Latn)": 35.82, "STS17 (ita-Latn_eng-Latn)": 24.45, "STS22 (ar)": 22.64, "STS22 (de)": 31.04, "STS22 (de-en)": 44.04, "STS22 (de-fr)": 30.07, "STS22 (de-pl)": 4.93, "STS22 (en)": 67.71, "STS22 (es)": 54.78, "STS22 (es-en)": 53.42, "STS22 (es-it)": 44.27, "STS22 (fr)": 77.0, "STS22 (fr-pl)": 50.71, "STS22 (it)": 60.4, "STS22 (pl)": 26.77, "STS22 (pl-en)": 32.8, "STS22 (ru)": 14.72, "STS22 (tr)": 33.69, "STS22 (zh)": 44.93, "STS22 (zh-en)": 41.64, "STS22 (tur-Latn)": 33.69, "STS22 (spa-Latn)": 54.78, "STS22 (ara-Arab)": 22.64, "STS22 (deu-Latn_pol-Latn)": -4.93, "STS22 
(spa-Latn_eng-Latn)": 53.42, "STS22 (cmn-Hans_eng-Latn)": 41.64, "STS22 (rus-Cyrl)": 14.72, "STS22 (spa-Latn_ita-Latn)": 44.27, "STS22 (deu-Latn_fra-Latn)": 30.07, "STS22 (deu-Latn)": 31.04, "STS22 (fra-Latn_pol-Latn)": 50.71, "STS22 (pol-Latn)": 26.77, "STS22 (pol-Latn_eng-Latn)": 32.8, "STS22 (deu-Latn_eng-Latn)": 44.04, "STS22 (ita-Latn)": 60.4, "STS22 (fra-Latn)": 77.0, "STS22 (cmn-Hans)": 44.93, "STSB (cmn-Hans)": 37.8, "STSBenchmark": 82.03, "STSBenchmarkMultilingualSTS (fr)": 64.93, "STSBenchmarkMultilingualSTS (pol-Latn)": 56.42, "STSBenchmarkMultilingualSTS (por-Latn)": 61.56, "STSBenchmarkMultilingualSTS (ita-Latn)": 59.24, "STSBenchmarkMultilingualSTS (fra-Latn)": 64.93, "STSBenchmarkMultilingualSTS (deu-Latn)": 62.4, "STSBenchmarkMultilingualSTS (cmn-Hans)": 39.74, "STSBenchmarkMultilingualSTS (spa-Latn)": 61.62, "STSBenchmarkMultilingualSTS (rus-Cyrl)": 55.55, "STSBenchmarkMultilingualSTS (en)": 82.03, "STSBenchmarkMultilingualSTS (nld-Latn)": 55.46 } ] }, "Summarization": { "spearman": [ { "Model": "all-MiniLM-L6-v2", "SummEval": 30.81, "SummEvalFr": 28.28, "SummEvalFr (fra-Latn)": 28.29 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "all-MiniLM-L6-v2" } ] } }, "sentence-croissant-llm-base": { "BitextMining": { "f1": [ { "Model": "sentence-croissant-llm-base" } ] }, "Classification": { "accuracy": [ { "Model": "sentence-croissant-llm-base", "AmazonReviewsClassification (fr)": 34.79, "MTOPDomainClassification (fr)": 85.52, "MTOPIntentClassification (fr)": 63.12, "MasakhaNEWSClassification (fra)": 79.29, "MassiveIntentClassification (fr)": 59.41, "MassiveScenarioClassification (fr)": 65.29 } ] }, "Clustering": { "v_measure": [ { "Model": "sentence-croissant-llm-base", "AlloProfClusteringP2P": 64.12, "AlloProfClusteringS2S": 32.52, "HALClusteringS2S": 23.4, "MLSUMClusteringP2P": 42.94, "MLSUMClusteringS2S": 33.91, "MasakhaNEWSClusteringP2P (fra)": 53.94, "MasakhaNEWSClusteringS2S (fra)": 41.05 } ] }, "PairClassification": { "ap": [ { "Model": "sentence-croissant-llm-base", "OpusparcusPC (fr)": 91.42, "PawsXPairClassification (fr)": 63.13 } ] }, "Reranking": { "map": [ { "Model": "sentence-croissant-llm-base", "AlloprofReranking": 53.0, "SyntecReranking": 82.9 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "sentence-croissant-llm-base", "AlloprofRetrieval": 29.97, "BSARDRetrieval": 0.0, "MintakaRetrieval (fr)": 21.31, "SyntecRetrieval": 74.2, "XPQARetrieval (fr)": 58.57 } ] }, "STS": { "spearman": [ { "Model": "sentence-croissant-llm-base", "SICKFr": 69.6, "STS22 (fr)": 78.77, "STSBenchmarkMultilingualSTS (fr)": 79.23 } ] }, "Summarization": { "spearman": [ { "Model": "sentence-croissant-llm-base", "SummEvalFr": 29.04 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "sentence-croissant-llm-base" } ] } }, "all-mpnet-base-v2-instruct": { "BitextMining": { "f1": [ { "Model": "all-mpnet-base-v2-instruct" } ] }, "Classification": { "accuracy": [ { "Model": "all-mpnet-base-v2-instruct" } ] }, "Clustering": { "v_measure": [ { "Model": "all-mpnet-base-v2-instruct" } ] }, "PairClassification": { "ap": [ { "Model": "all-mpnet-base-v2-instruct" } ] }, "Reranking": { "map": [ { "Model": "all-mpnet-base-v2-instruct" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "all-mpnet-base-v2-instruct", "ARCChallenge": 10.35, "AlphaNLI": 1.96, "HellaSwag": 13.01, "PIQA": 27.18, "Quail": 3.02, "RARbCode": 48.95, "RARbMath": 69.21, "SIQA": 1.29, "SpartQA": 1.01, "TempReasonL1": 1.52, "TempReasonL2Fact": 7.28, "TempReasonL2Pure": 1.03, "TempReasonL3Fact": 7.03, "TempReasonL3Pure": 5.16, 
"WinoGrande": 9.66 } ] }, "STS": { "spearman": [ { "Model": "all-mpnet-base-v2-instruct" } ] }, "Summarization": { "spearman": [ { "Model": "all-mpnet-base-v2-instruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "all-mpnet-base-v2-instruct" } ] } }, "bge-large-zh-v1.5": { "BitextMining": { "f1": [ { "Model": "bge-large-zh-v1.5" } ] }, "Classification": { "accuracy": [ { "Model": "bge-large-zh-v1.5", "AmazonReviewsClassification (zh)": 41.38, "IFlyTek": 48.74, "JDReview": 85.14, "MassiveIntentClassification (zh-CN)": 68.84, "MassiveScenarioClassification (zh-CN)": 74.7, "MultilingualSentiment": 72.97, "OnlineShopping": 91.43, "TNews": 52.1, "Waimai": 86.9 } ] }, "Clustering": { "v_measure": [ { "Model": "bge-large-zh-v1.5", "CLSClusteringP2P": 41.44, "CLSClusteringS2S": 38.33, "ThuNewsClusteringP2P": 59.61, "ThuNewsClusteringS2S": 56.58 } ] }, "PairClassification": { "ap": [ { "Model": "bge-large-zh-v1.5", "Cmnli": 85.27, "Ocnli": 77.94 } ] }, "Reranking": { "map": [ { "Model": "bge-large-zh-v1.5", "CMedQAv1": 83.45, "CMedQAv2": 85.44, "MMarcoReranking": 28.74, "T2Reranking": 65.74 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "bge-large-zh-v1.5", "CmedqaRetrieval": 42.57, "CovidRetrieval": 73.35, "DuRetrieval": 86.32, "EcomRetrieval": 65.33, "MMarcoRetrieval": 79.23, "MedicalRetrieval": 59.59, "T2Retrieval": 83.99, "VideoRetrieval": 73.32 } ] }, "STS": { "spearman": [ { "Model": "bge-large-zh-v1.5", "AFQMC": 44.36, "ATEC": 49.54, "BQ": 62.94, "LCQMC": 74.33, "PAWSX": 33.92, "QBQTC": 37.29, "STS22 (zh)": 68.94, "STSB": 78.7 } ] }, "Summarization": { "spearman": [ { "Model": "bge-large-zh-v1.5" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "bge-large-zh-v1.5" } ] } }, "text-embedding-ada-002": { "BitextMining": { "f1": [ { "Model": "text-embedding-ada-002" } ] }, "Classification": { "accuracy": [ { "Model": "text-embedding-ada-002", "AmazonCounterfactualClassification (en)": 75.94, "AmazonPolarityClassification": 86.72, "AmazonReviewsClassification (zh)": 38.3, "AmazonReviewsClassification (en)": 44.78, "AmazonReviewsClassification (fr)": 43.76, "Banking77Classification": 80.66, "EmotionClassification": 48.74, "IFlyTek": 44.62, "ImdbClassification": 77.98, "JDReview": 74.6, "MTOPDomainClassification (en)": 92.13, "MTOPDomainClassification (fr)": 89.38, "MTOPIntentClassification (en)": 64.68, "MTOPIntentClassification (fr)": 64.45, "MasakhaNEWSClassification (fra)": 81.52, "MassiveIntentClassification (zh-CN)": 64.81, "MassiveIntentClassification (en)": 70.15, "MassiveIntentClassification (fr)": 65.42, "MassiveScenarioClassification (zh-CN)": 71.4, "MassiveScenarioClassification (en)": 75.33, "MassiveScenarioClassification (fr)": 71.11, "MultilingualSentiment": 67.99, "OnlineShopping": 88.94, "TNews": 45.77, "ToxicConversationsClassification": 72.29, "TweetSentimentExtractionClassification": 61.81, "Waimai": 82.37 } ] }, "Clustering": { "v_measure": [ { "Model": "text-embedding-ada-002", "AlloProfClusteringP2P": 64.83, "AlloProfClusteringS2S": 53.52, "ArxivClusteringP2P": 45.01, "ArxivClusteringS2S": 36.85, "BiorxivClusteringP2P": 36.66, "BiorxivClusteringS2S": 34.21, "CLSClusteringP2P": 38.26, "CLSClusteringS2S": 35.91, "HALClusteringS2S": 26.18, "MLSUMClusteringP2P": 44.59, "MLSUMClusteringS2S": 41.67, "MasakhaNEWSClusteringP2P (fra)": 68.35, "MasakhaNEWSClusteringS2S (fra)": 48.58, "MedrxivClusteringP2P": 32.6, "MedrxivClusteringS2S": 30.8, "RedditClustering": 61.42, "RedditClusteringP2P": 64.13, "StackExchangeClustering": 72.22, "StackExchangeClusteringP2P": 38.49, 
"ThuNewsClusteringP2P": 58.71, "ThuNewsClusteringS2S": 49.86, "TwentyNewsgroupsClustering": 52.56 } ] }, "PairClassification": { "ap": [ { "Model": "text-embedding-ada-002", "Cmnli": 76.03, "Ocnli": 63.08, "OpusparcusPC (fr)": 94.12, "PawsXPairClassification (fr)": 60.16, "SprintDuplicateQuestions": 92.17, "TwitterSemEval2015": 75.28, "TwitterURLCorpus": 87.22 } ] }, "Reranking": { "map": [ { "Model": "text-embedding-ada-002", "AskUbuntuDupQuestions": 62.05, "CMedQAv1": 63.08, "CMedQAv2": 64.02, "MMarcoReranking": 23.39, "MindSmallReranking": 31.45, "SciDocsRR": 81.22, "StackOverflowDupQuestions": 50.54, "SyntecReranking": 89.87, "T2Reranking": 66.65 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "text-embedding-ada-002", "ARCChallenge": 13.3, "AlloprofRetrieval": 51.64, "AlphaNLI": 25.65, "ArguAna": 57.44, "BSARDRetrieval": 0.61, "CQADupstackRetrieval": 41.69, "ClimateFEVER": 21.64, "CmedqaRetrieval": 22.36, "CovidRetrieval": 57.21, "DBPedia": 39.39, "DuRetrieval": 71.17, "EcomRetrieval": 44.49, "FEVER": 74.99, "FiQA2018": 44.41, "HellaSwag": 29.29, "HotpotQA": 60.9, "MMarcoRetrieval": 69.86, "MSMARCO": 40.91, "MedicalRetrieval": 37.92, "MintakaRetrieval (fr)": 29.94, "NFCorpus": 36.97, "NQ": 51.58, "PIQA": 31.02, "Quail": 5.83, "QuoraRetrieval": 87.6, "RARbCode": 83.39, "RARbMath": 73.21, "SCIDOCS": 18.36, "SIQA": 3.14, "SciFact": 72.75, "SpartQA": 4.23, "SyntecRetrieval": 85.97, "T2Retrieval": 69.14, "TRECCOVID": 68.47, "TempReasonL1": 1.68, "TempReasonL2Fact": 19.93, "TempReasonL2Pure": 2.6, "TempReasonL3Fact": 18.02, "TempReasonL3Pure": 7.58, "Touche2020": 21.61, "VideoRetrieval": 43.85, "WinoGrande": 19.65, "XPQARetrieval (fr)": 73.0 } ] }, "STS": { "spearman": [ { "Model": "text-embedding-ada-002", "AFQMC": 23.88, "ATEC": 29.25, "BIOSSES": 86.35, "BQ": 45.33, "LCQMC": 68.41, "PAWSX": 16.55, "QBQTC": 30.27, "SICK-R": 80.6, "SICKFr": 76.28, "STS12": 69.8, "STS13": 83.27, "STS14": 76.09, "STS15": 86.12, "STS16": 85.96, "STS17 (en-en)": 90.25, "STS22 (zh)": 62.53, "STS22 (en)": 68.12, "STS22 (tr)": 64.5, "STS22 (fr)": 81.09, "STSB": 70.61, "STSBenchmark": 83.17, "STSBenchmarkMultilingualSTS (fr)": 77.55 } ] }, "Summarization": { "spearman": [ { "Model": "text-embedding-ada-002", "SummEval": 30.8, "SummEvalFr": 30.5 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "text-embedding-ada-002" } ] } }, "LLM2Vec-Meta-Llama-3-supervised": { "BitextMining": { "f1": [ { "Model": "LLM2Vec-Meta-Llama-3-supervised" } ] }, "Classification": { "accuracy": [ { "Model": "LLM2Vec-Meta-Llama-3-supervised", "AmazonCounterfactualClassification (en)": 79.94, "AmazonPolarityClassification": 86.07, "AmazonReviewsClassification (en)": 46.84, "Banking77Classification": 88.05, "EmotionClassification": 51.2, "ImdbClassification": 82.94, "MTOPDomainClassification (en)": 96.14, "MTOPIntentClassification (en)": 86.11, "MassiveIntentClassification (en)": 79.8, "MassiveScenarioClassification (en)": 81.52, "ToxicConversationsClassification": 70.59, "TweetSentimentExtractionClassification": 61.9 } ] }, "Clustering": { "v_measure": [ { "Model": "LLM2Vec-Meta-Llama-3-supervised", "ArxivClusteringP2P": 44.27, "ArxivClusteringS2S": 46.85, "BiorxivClusteringP2P": 32.35, "BiorxivClusteringS2S": 36.7, "MedrxivClusteringP2P": 30.71, "MedrxivClusteringS2S": 32.96, "RedditClustering": 61.72, "RedditClusteringP2P": 63.98, "StackExchangeClustering": 72.74, "StackExchangeClusteringP2P": 32.26, "TwentyNewsgroupsClustering": 56.41 } ] }, "PairClassification": { "ap": [ { "Model": "LLM2Vec-Meta-Llama-3-supervised", 
"SprintDuplicateQuestions": 95.09, "TwitterSemEval2015": 81.73, "TwitterURLCorpus": 86.56 } ] }, "Reranking": { "map": [ { "Model": "LLM2Vec-Meta-Llama-3-supervised", "AskUbuntuDupQuestions": 65.19, "MindSmallReranking": 32.67, "SciDocsRR": 86.05, "StackOverflowDupQuestions": 54.82 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "LLM2Vec-Meta-Llama-3-supervised", "ArguAna": 62.78, "CQADupstackRetrieval": 48.25, "ClimateFEVER": 34.27, "DBPedia": 48.34, "FEVER": 90.2, "FiQA2018": 55.33, "HotpotQA": 71.76, "MSMARCO": 43.24, "NFCorpus": 41.83, "NQ": 64.21, "QuoraRetrieval": 87.16, "SCIDOCS": 22.96, "SciFact": 78.22, "TRECCOVID": 80.34, "Touche2020": 20.5 } ] }, "STS": { "spearman": [ { "Model": "LLM2Vec-Meta-Llama-3-supervised", "BIOSSES": 84.92, "SICK-R": 83.94, "STS12": 79.27, "STS13": 84.83, "STS14": 82.94, "STS15": 88.09, "STS16": 86.54, "STS17 (en-en)": 89.58, "STS22 (en)": 67.67, "STSBenchmark": 88.05 } ] }, "Summarization": { "spearman": [ { "Model": "LLM2Vec-Meta-Llama-3-supervised", "SummEval": 30.94 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "LLM2Vec-Meta-Llama-3-supervised" } ] } }, "gtr-t5-xl": { "BitextMining": { "f1": [ { "Model": "gtr-t5-xl", "BUCC (de-en)": 90.99, "BUCC (fr-en)": 88.55, "BUCC (ru-en)": 2.07, "BUCC (zh-en)": 1.49, "Tatoeba (afr-eng)": 33.47, "Tatoeba (amh-eng)": 0.01, "Tatoeba (ang-eng)": 30.74, "Tatoeba (ara-eng)": 0.47, "Tatoeba (arq-eng)": 0.34, "Tatoeba (arz-eng)": 0.14, "Tatoeba (ast-eng)": 51.74, "Tatoeba (awa-eng)": 0.49, "Tatoeba (aze-eng)": 7.43, "Tatoeba (bel-eng)": 3.45, "Tatoeba (ben-eng)": 0.06, "Tatoeba (ber-eng)": 5.79, "Tatoeba (bos-eng)": 17.43, "Tatoeba (bre-eng)": 5.69, "Tatoeba (bul-eng)": 7.55, "Tatoeba (cat-eng)": 48.06, "Tatoeba (cbk-eng)": 54.56, "Tatoeba (ceb-eng)": 8.72, "Tatoeba (ces-eng)": 8.76, "Tatoeba (cha-eng)": 27.56, "Tatoeba (cmn-eng)": 2.26, "Tatoeba (cor-eng)": 3.69, "Tatoeba (csb-eng)": 13.18, "Tatoeba (cym-eng)": 6.97, "Tatoeba (dan-eng)": 47.36, "Tatoeba (deu-eng)": 91.54, "Tatoeba (dsb-eng)": 13.2, "Tatoeba (dtp-eng)": 4.54, "Tatoeba (ell-eng)": 0.55, "Tatoeba (epo-eng)": 27.86, "Tatoeba (est-eng)": 5.13, "Tatoeba (eus-eng)": 10.23, "Tatoeba (fao-eng)": 21.44, "Tatoeba (fin-eng)": 6.62, "Tatoeba (fra-eng)": 79.66, "Tatoeba (fry-eng)": 32.92, "Tatoeba (gla-eng)": 2.87, "Tatoeba (gle-eng)": 3.26, "Tatoeba (glg-eng)": 63.81, "Tatoeba (gsw-eng)": 29.71, "Tatoeba (heb-eng)": 0.33, "Tatoeba (hin-eng)": 0.25, "Tatoeba (hrv-eng)": 17.16, "Tatoeba (hsb-eng)": 12.02, "Tatoeba (hun-eng)": 7.21, "Tatoeba (hye-eng)": 0.78, "Tatoeba (ido-eng)": 40.83, "Tatoeba (ile-eng)": 54.95, "Tatoeba (ina-eng)": 72.28, "Tatoeba (ind-eng)": 30.95, "Tatoeba (isl-eng)": 11.29, "Tatoeba (ita-eng)": 73.83, "Tatoeba (jav-eng)": 8.66, "Tatoeba (jpn-eng)": 0.61, "Tatoeba (kab-eng)": 1.78, "Tatoeba (kat-eng)": 0.79, "Tatoeba (kaz-eng)": 0.95, "Tatoeba (khm-eng)": 0.49, "Tatoeba (kor-eng)": 1.87, "Tatoeba (kur-eng)": 10.91, "Tatoeba (kzj-eng)": 5.72, "Tatoeba (lat-eng)": 18.24, "Tatoeba (lfn-eng)": 43.49, "Tatoeba (lit-eng)": 7.13, "Tatoeba (lvs-eng)": 7.04, "Tatoeba (mal-eng)": 0.44, "Tatoeba (mar-eng)": 0.03, "Tatoeba (max-eng)": 18.99, "Tatoeba (mhr-eng)": 1.11, "Tatoeba (mkd-eng)": 2.49, "Tatoeba (mon-eng)": 2.01, "Tatoeba (nds-eng)": 39.96, "Tatoeba (nld-eng)": 58.86, "Tatoeba (nno-eng)": 29.07, "Tatoeba (nob-eng)": 40.25, "Tatoeba (nov-eng)": 50.19, "Tatoeba (oci-eng)": 30.72, "Tatoeba (orv-eng)": 0.85, "Tatoeba (pam-eng)": 7.21, "Tatoeba (pes-eng)": 0.53, "Tatoeba (pms-eng)": 31.07, "Tatoeba (pol-eng)": 18.06, "Tatoeba (por-eng)": 81.92, 
"Tatoeba (ron-eng)": 62.6, "Tatoeba (rus-eng)": 22.24, "Tatoeba (slk-eng)": 10.59, "Tatoeba (slv-eng)": 11.4, "Tatoeba (spa-eng)": 85.78, "Tatoeba (sqi-eng)": 14.92, "Tatoeba (srp-eng)": 9.87, "Tatoeba (swe-eng)": 55.08, "Tatoeba (swg-eng)": 32.66, "Tatoeba (swh-eng)": 7.64, "Tatoeba (tam-eng)": 0.49, "Tatoeba (tat-eng)": 1.28, "Tatoeba (tel-eng)": 0.45, "Tatoeba (tgl-eng)": 23.63, "Tatoeba (tha-eng)": 0.61, "Tatoeba (tuk-eng)": 5.71, "Tatoeba (tur-eng)": 8.25, "Tatoeba (tzl-eng)": 28.4, "Tatoeba (uig-eng)": 0.57, "Tatoeba (ukr-eng)": 5.69, "Tatoeba (urd-eng)": 0.0, "Tatoeba (uzb-eng)": 4.19, "Tatoeba (vie-eng)": 9.07, "Tatoeba (war-eng)": 12.31, "Tatoeba (wuu-eng)": 1.38, "Tatoeba (xho-eng)": 7.6, "Tatoeba (yid-eng)": 0.41, "Tatoeba (yue-eng)": 1.31, "Tatoeba (zsm-eng)": 29.74 } ] }, "Classification": { "accuracy": [ { "Model": "gtr-t5-xl", "AmazonCounterfactualClassification (de)": 59.79, "AmazonCounterfactualClassification (en)": 68.6, "AmazonCounterfactualClassification (en-ext)": 69.03, "AmazonCounterfactualClassification (ja)": 50.59, "AmazonPolarityClassification": 74.58, "AmazonReviewsClassification (de)": 35.06, "AmazonReviewsClassification (en)": 38.2, "AmazonReviewsClassification (es)": 37.18, "AmazonReviewsClassification (fr)": 35.48, "AmazonReviewsClassification (ja)": 22.24, "AmazonReviewsClassification (zh)": 21.89, "Banking77Classification": 82.22, "EmotionClassification": 45.54, "ImdbClassification": 68.15, "MTOPDomainClassification (de)": 85.42, "MTOPDomainClassification (en)": 93.6, "MTOPDomainClassification (es)": 88.2, "MTOPDomainClassification (fr)": 85.05, "MTOPDomainClassification (hi)": 21.74, "MTOPDomainClassification (th)": 15.87, "MTOPIntentClassification (de)": 55.75, "MTOPIntentClassification (en)": 65.93, "MTOPIntentClassification (es)": 57.73, "MTOPIntentClassification (fr)": 51.07, "MTOPIntentClassification (hi)": 3.19, "MTOPIntentClassification (th)": 5.55, "MassiveIntentClassification (af)": 42.6, "MassiveIntentClassification (am)": 2.12, "MassiveIntentClassification (ar)": 4.64, "MassiveIntentClassification (az)": 35.05, "MassiveIntentClassification (bn)": 2.84, "MassiveIntentClassification (cy)": 36.19, "MassiveIntentClassification (da)": 48.42, "MassiveIntentClassification (de)": 55.49, "MassiveIntentClassification (el)": 10.14, "MassiveIntentClassification (en)": 70.23, "MassiveIntentClassification (es)": 56.72, "MassiveIntentClassification (fa)": 3.54, "MassiveIntentClassification (fi)": 37.13, "MassiveIntentClassification (fr)": 57.67, "MassiveIntentClassification (he)": 2.56, "MassiveIntentClassification (hi)": 3.24, "MassiveIntentClassification (hu)": 34.22, "MassiveIntentClassification (hy)": 3.01, "MassiveIntentClassification (id)": 46.54, "MassiveIntentClassification (is)": 34.77, "MassiveIntentClassification (it)": 54.13, "MassiveIntentClassification (ja)": 4.27, "MassiveIntentClassification (jv)": 36.97, "MassiveIntentClassification (ka)": 2.72, "MassiveIntentClassification (km)": 5.35, "MassiveIntentClassification (kn)": 3.17, "MassiveIntentClassification (ko)": 2.64, "MassiveIntentClassification (lv)": 36.32, "MassiveIntentClassification (ml)": 3.18, "MassiveIntentClassification (mn)": 22.85, "MassiveIntentClassification (ms)": 42.87, "MassiveIntentClassification (my)": 4.04, "MassiveIntentClassification (nb)": 45.87, "MassiveIntentClassification (nl)": 49.53, "MassiveIntentClassification (pl)": 42.64, "MassiveIntentClassification (pt)": 57.03, "MassiveIntentClassification (ro)": 49.95, "MassiveIntentClassification (ru)": 36.58, 
"MassiveIntentClassification (sl)": 39.44, "MassiveIntentClassification (sq)": 41.78, "MassiveIntentClassification (sv)": 47.95, "MassiveIntentClassification (sw)": 35.85, "MassiveIntentClassification (ta)": 2.32, "MassiveIntentClassification (te)": 2.2, "MassiveIntentClassification (th)": 3.74, "MassiveIntentClassification (tl)": 43.12, "MassiveIntentClassification (tr)": 35.24, "MassiveIntentClassification (ur)": 3.0, "MassiveIntentClassification (vi)": 30.01, "MassiveIntentClassification (zh-CN)": 1.72, "MassiveIntentClassification (zh-TW)": 3.35, "MassiveScenarioClassification (af)": 52.54, "MassiveScenarioClassification (am)": 6.3, "MassiveScenarioClassification (ar)": 11.96, "MassiveScenarioClassification (az)": 40.17, "MassiveScenarioClassification (bn)": 8.29, "MassiveScenarioClassification (cy)": 42.24, "MassiveScenarioClassification (da)": 57.28, "MassiveScenarioClassification (de)": 68.09, "MassiveScenarioClassification (el)": 16.66, "MassiveScenarioClassification (en)": 75.94, "MassiveScenarioClassification (es)": 64.32, "MassiveScenarioClassification (fa)": 6.9, "MassiveScenarioClassification (fi)": 43.96, "MassiveScenarioClassification (fr)": 66.72, "MassiveScenarioClassification (he)": 7.51, "MassiveScenarioClassification (hi)": 7.82, "MassiveScenarioClassification (hu)": 42.16, "MassiveScenarioClassification (hy)": 9.33, "MassiveScenarioClassification (id)": 53.54, "MassiveScenarioClassification (is)": 42.84, "MassiveScenarioClassification (it)": 62.44, "MassiveScenarioClassification (ja)": 7.29, "MassiveScenarioClassification (jv)": 43.13, "MassiveScenarioClassification (ka)": 7.63, "MassiveScenarioClassification (km)": 9.08, "MassiveScenarioClassification (kn)": 8.1, "MassiveScenarioClassification (ko)": 6.35, "MassiveScenarioClassification (lv)": 40.24, "MassiveScenarioClassification (ml)": 7.65, "MassiveScenarioClassification (mn)": 27.98, "MassiveScenarioClassification (ms)": 52.41, "MassiveScenarioClassification (my)": 9.21, "MassiveScenarioClassification (nb)": 54.44, "MassiveScenarioClassification (nl)": 60.35, "MassiveScenarioClassification (pl)": 49.97, "MassiveScenarioClassification (pt)": 62.78, "MassiveScenarioClassification (ro)": 59.62, "MassiveScenarioClassification (ru)": 43.44, "MassiveScenarioClassification (sl)": 44.79, "MassiveScenarioClassification (sq)": 50.84, "MassiveScenarioClassification (sv)": 58.21, "MassiveScenarioClassification (sw)": 44.63, "MassiveScenarioClassification (ta)": 7.95, "MassiveScenarioClassification (te)": 7.5, "MassiveScenarioClassification (th)": 8.79, "MassiveScenarioClassification (tl)": 53.54, "MassiveScenarioClassification (tr)": 42.47, "MassiveScenarioClassification (ur)": 9.58, "MassiveScenarioClassification (vi)": 34.68, "MassiveScenarioClassification (zh-CN)": 5.21, "MassiveScenarioClassification (zh-TW)": 8.77, "ToxicConversationsClassification": 67.56, "TweetSentimentExtractionClassification": 54.77 } ] }, "Clustering": { "v_measure": [ { "Model": "gtr-t5-xl", "ArxivClusteringP2P": 37.9, "ArxivClusteringS2S": 30.45, "BiorxivClusteringP2P": 30.52, "BiorxivClusteringS2S": 26.06, "MedrxivClusteringP2P": 28.69, "MedrxivClusteringS2S": 26.69, "RedditClustering": 61.34, "RedditClusteringP2P": 61.11, "StackExchangeClustering": 69.95, "StackExchangeClusteringP2P": 32.73, "TwentyNewsgroupsClustering": 51.15 } ] }, "PairClassification": { "ap": [ { "Model": "gtr-t5-xl", "SprintDuplicateQuestions": 95.45, "TwitterSemEval2015": 77.81, "TwitterURLCorpus": 85.14 } ] }, "Reranking": { "map": [ { "Model": "gtr-t5-xl", 
"AskUbuntuDupQuestions": 63.08, "MindSmallReranking": 31.5, "SciDocsRR": 76.49, "StackOverflowDupQuestions": 52.79 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "gtr-t5-xl", "ArguAna": 52.81, "CQADupstackRetrieval": 37.35, "ClimateFEVER": 27.01, "DBPedia": 39.74, "FEVER": 72.18, "FiQA2018": 44.19, "HotpotQA": 58.91, "MSMARCO": 43.52, "NFCorpus": 33.34, "NQ": 56.16, "QuoraRetrieval": 88.91, "SCIDOCS": 15.71, "SciFact": 64.2, "TRECCOVID": 60.09, "Touche2020": 25.26 } ] }, "STS": { "spearman": [ { "Model": "gtr-t5-xl", "BIOSSES": 78.94, "SICK-R": 73.63, "STS12": 69.11, "STS13": 81.82, "STS14": 77.07, "STS15": 86.01, "STS16": 82.23, "STS17 (ar-ar)": 9.06, "STS17 (en-ar)": -3.22, "STS17 (en-de)": 70.38, "STS17 (en-en)": 84.9, "STS17 (en-tr)": 17.17, "STS17 (es-en)": 60.24, "STS17 (es-es)": 81.93, "STS17 (fr-en)": 62.17, "STS17 (it-en)": 59.11, "STS17 (ko-ko)": 8.9, "STS17 (nl-en)": 56.91, "STS22 (ar)": 37.66, "STS22 (de)": 50.58, "STS22 (de-en)": 53.63, "STS22 (de-fr)": 55.72, "STS22 (de-pl)": 27.99, "STS22 (en)": 66.61, "STS22 (es)": 59.14, "STS22 (es-en)": 69.99, "STS22 (es-it)": 60.94, "STS22 (fr)": 79.43, "STS22 (fr-pl)": 61.98, "STS22 (it)": 67.14, "STS22 (pl)": 33.74, "STS22 (pl-en)": 60.18, "STS22 (ru)": 32.69, "STS22 (tr)": 55.79, "STS22 (zh)": 31.16, "STS22 (zh-en)": 28.85, "STSBenchmark": 77.65 } ] }, "Summarization": { "spearman": [ { "Model": "gtr-t5-xl", "SummEval": 30.21 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "gtr-t5-xl" } ] } }, "luotuo-bert-medium": { "BitextMining": { "f1": [ { "Model": "luotuo-bert-medium" } ] }, "Classification": { "accuracy": [ { "Model": "luotuo-bert-medium", "AmazonReviewsClassification (zh)": 34.46, "IFlyTek": 41.75, "JDReview": 79.68, "MassiveIntentClassification (zh-CN)": 57.47, "MassiveScenarioClassification (zh-CN)": 65.32, "MultilingualSentiment": 61.21, "OnlineShopping": 84.3, "TNews": 45.22, "Waimai": 79.57 } ] }, "Clustering": { "v_measure": [ { "Model": "luotuo-bert-medium", "CLSClusteringP2P": 37.01, "CLSClusteringS2S": 33.46, "ThuNewsClusteringP2P": 58.83, "ThuNewsClusteringS2S": 48.26 } ] }, "PairClassification": { "ap": [ { "Model": "luotuo-bert-medium", "Cmnli": 72.55, "Ocnli": 60.7 } ] }, "Reranking": { "map": [ { "Model": "luotuo-bert-medium", "CMedQAv1": 57.82, "CMedQAv2": 58.88, "MMarcoReranking": 14.55, "T2Reranking": 65.76 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "luotuo-bert-medium", "CmedqaRetrieval": 18.04, "CovidRetrieval": 55.48, "DuRetrieval": 59.36, "EcomRetrieval": 40.48, "MMarcoRetrieval": 55.31, "MedicalRetrieval": 29.8, "T2Retrieval": 58.67, "VideoRetrieval": 38.04 } ] }, "STS": { "spearman": [ { "Model": "luotuo-bert-medium", "AFQMC": 22.24, "ATEC": 30.84, "BQ": 43.33, "LCQMC": 66.74, "PAWSX": 12.31, "QBQTC": 27.2, "STS22 (zh)": 66.4, "STSB": 73.22 } ] }, "Summarization": { "spearman": [ { "Model": "luotuo-bert-medium" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "luotuo-bert-medium" } ] } }, "contriever-instruct": { "BitextMining": { "f1": [ { "Model": "contriever-instruct" } ] }, "Classification": { "accuracy": [ { "Model": "contriever-instruct" } ] }, "Clustering": { "v_measure": [ { "Model": "contriever-instruct" } ] }, "PairClassification": { "ap": [ { "Model": "contriever-instruct" } ] }, "Reranking": { "map": [ { "Model": "contriever-instruct" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "contriever-instruct", "ARCChallenge": 7.63, "AlphaNLI": 27.09, "PIQA": 21.73, "Quail": 4.92, "RARbCode": 7.12, "RARbMath": 21.83, "SIQA": 0.88, "SpartQA": 10.56, "TempReasonL1": 1.8, 
"TempReasonL2Fact": 22.03, "TempReasonL2Pure": 0.94, "TempReasonL3Fact": 20.82, "TempReasonL3Pure": 7.15, "WinoGrande": 26.3 } ] }, "STS": { "spearman": [ { "Model": "contriever-instruct" } ] }, "Summarization": { "spearman": [ { "Model": "contriever-instruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "contriever-instruct" } ] } }, "text-search-babbage-001": { "BitextMining": { "f1": [ { "Model": "text-search-babbage-001" } ] }, "Classification": { "accuracy": [ { "Model": "text-search-babbage-001" } ] }, "Clustering": { "v_measure": [ { "Model": "text-search-babbage-001" } ] }, "PairClassification": { "ap": [ { "Model": "text-search-babbage-001" } ] }, "Reranking": { "map": [ { "Model": "text-search-babbage-001" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "text-search-babbage-001", "ArguAna": 49.2, "ClimateFEVER": 19.9, "FEVER": 77.0, "FiQA2018": 42.2, "HotpotQA": 63.1, "NFCorpus": 36.7, "QuoraRetrieval": 69.7, "SciFact": 70.4, "TRECCOVID": 58.5, "Touche2020": 29.7 } ] }, "STS": { "spearman": [ { "Model": "text-search-babbage-001" } ] }, "Summarization": { "spearman": [ { "Model": "text-search-babbage-001" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "text-search-babbage-001" } ] } }, "Cohere-embed-multilingual-light-v3.0": { "BitextMining": { "f1": [ { "Model": "Cohere-embed-multilingual-light-v3.0" } ] }, "Classification": { "accuracy": [ { "Model": "Cohere-embed-multilingual-light-v3.0", "AmazonReviewsClassification (fr)": 38.6, "MTOPDomainClassification (fr)": 80.79, "MTOPIntentClassification (fr)": 50.01, "MasakhaNEWSClassification (fra)": 82.58, "MassiveIntentClassification (fr)": 56.31, "MassiveScenarioClassification (fr)": 59.5 } ] }, "Clustering": { "v_measure": [ { "Model": "Cohere-embed-multilingual-light-v3.0", "AlloProfClusteringP2P": 61.96, "AlloProfClusteringS2S": 31.36, "HALClusteringS2S": 17.31, "MLSUMClusteringP2P": 42.8, "MLSUMClusteringS2S": 32.72, "MasakhaNEWSClusteringP2P (fra)": 56.81, "MasakhaNEWSClusteringS2S (fra)": 29.41 } ] }, "PairClassification": { "ap": [ { "Model": "Cohere-embed-multilingual-light-v3.0", "OpusparcusPC (fr)": 90.92, "PawsXPairClassification (fr)": 57.32 } ] }, "Reranking": { "map": [ { "Model": "Cohere-embed-multilingual-light-v3.0", "AlloprofReranking": 51.6, "SyntecReranking": 88.03 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "Cohere-embed-multilingual-light-v3.0", "AlloprofRetrieval": 35.39, "BSARDRetrieval": 0.0, "MintakaRetrieval (fr)": 23.0, "SyntecRetrieval": 76.88, "XPQARetrieval (fr)": 45.23 } ] }, "STS": { "spearman": [ { "Model": "Cohere-embed-multilingual-light-v3.0", "SICKFr": 75.5, "STS22 (fr)": 82.8, "STSBenchmarkMultilingualSTS (fr)": 76.48 } ] }, "Summarization": { "spearman": [ { "Model": "Cohere-embed-multilingual-light-v3.0", "SummEvalFr": 31.4 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "Cohere-embed-multilingual-light-v3.0" } ] } }, "e5-mistral-7b-instruct": { "BitextMining": { "f1": [ { "Model": "e5-mistral-7b-instruct", "Tatoeba (rus-Cyrl_eng-Latn)": 93.75 } ] }, "Classification": { "accuracy": [ { "Model": "e5-mistral-7b-instruct", "AmazonReviewsClassification (fr)": 36.71, "GeoreviewClassification (rus-Cyrl)": 50.25, "HeadlineClassification (rus-Cyrl)": 85.68, "InappropriatenessClassification (rus-Cyrl)": 67.19, "KinopoiskClassification (rus-Cyrl)": 65.49, "MTOPDomainClassification (fr)": 74.8, "MTOPIntentClassification (fr)": 53.97, "MasakhaNEWSClassification (fra)": 80.59, "MassiveIntentClassification (rus-Cyrl)": 76.08, "MassiveIntentClassification (fr)": 46.39, 
"MassiveScenarioClassification (rus-Cyrl)": 79.61, "MassiveScenarioClassification (fr)": 53.86, "RuReviewsClassification (rus-Cyrl)": 67.68, "RuSciBenchGRNTIClassification (rus-Cyrl)": 64.59, "RuSciBenchOECDClassification (rus-Cyrl)": 51.13 } ] }, "Clustering": { "v_measure": [ { "Model": "e5-mistral-7b-instruct", "AlloProfClusteringP2P": 61.06, "AlloProfClusteringS2S": 28.12, "GeoreviewClusteringP2P (rus-Cyrl)": 65.68, "HALClusteringS2S": 19.69, "MLSUMClusteringP2P": 45.59, "MLSUMClusteringS2S": 32.0, "MasakhaNEWSClusteringP2P (fra)": 52.47, "MasakhaNEWSClusteringS2S (fra)": 49.2, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 61.55, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 52.72 } ] }, "PairClassification": { "ap": [ { "Model": "e5-mistral-7b-instruct", "OpusparcusPC (rus-Cyrl)": 91.44, "OpusparcusPC (fr)": 88.5, "PawsXPairClassification (fr)": 63.65, "TERRa (rus-Cyrl)": 59.38 } ] }, "Reranking": { "map": [ { "Model": "e5-mistral-7b-instruct", "AlloprofReranking": 47.36, "RuBQReranking (rus-Cyrl)": 74.61, "SyntecReranking": 77.05 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "e5-mistral-7b-instruct", "AILACasedocs": 38.76, "AILAStatutes": 38.07, "ARCChallenge": 17.81, "AlloprofRetrieval": 16.46, "AlphaNLI": 26.12, "BSARDRetrieval": 0.0, "BrightRetrieval (sustainable_living)": 18.51, "BrightRetrieval (economics)": 15.49, "BrightRetrieval (theoremqa_theorems)": 23.78, "BrightRetrieval (aops)": 7.1, "BrightRetrieval (theoremqa_questions)": 23.94, "BrightRetrieval (stackoverflow)": 9.83, "BrightRetrieval (psychology)": 15.79, "BrightRetrieval (pony)": 4.81, "BrightRetrieval (leetcode)": 28.72, "BrightRetrieval (biology)": 18.84, "BrightRetrieval (earth_science)": 25.96, "BrightRetrieval (robotics)": 16.37, "GerDaLIRSmall": 37.18, "HellaSwag": 34.85, "LEMBNarrativeQARetrieval": 44.62, "LEMBNeedleRetrieval": 48.25, "LEMBPasskeyRetrieval": 71.0, "LEMBQMSumRetrieval": 43.63, "LEMBSummScreenFDRetrieval": 96.82, "LEMBWikimQARetrieval": 82.11, "LeCaRDv2": 68.56, "LegalBenchConsumerContractsQA": 75.46, "LegalBenchCorporateLobbying": 94.01, "LegalQuAD": 59.64, "LegalSummarization": 66.51, "MintakaRetrieval (fr)": 3.57, "PIQA": 39.37, "Quail": 7.01, "RARbCode": 78.46, "RARbMath": 72.16, "RiaNewsRetrieval (rus-Cyrl)": 81.94, "RuBQRetrieval (rus-Cyrl)": 73.98, "SIQA": 5.42, "SpartQA": 9.92, "SyntecRetrieval": 55.9, "TempReasonL1": 3.31, "TempReasonL2Fact": 36.9, "TempReasonL2Pure": 9.18, "TempReasonL3Fact": 30.18, "TempReasonL3Pure": 14.31, "WinoGrande": 41.21, "XPQARetrieval (fr)": 41.29 } ] }, "STS": { "spearman": [ { "Model": "e5-mistral-7b-instruct", "RUParaPhraserSTS (rus-Cyrl)": 76.17, "RuSTSBenchmarkSTS (rus-Cyrl)": 84.13, "SICKFr": 64.39, "STS22 (fr)": 69.82, "STSBenchmarkMultilingualSTS (rus-Cyrl)": 84.25, "STSBenchmarkMultilingualSTS (fr)": 61.87 } ] }, "Summarization": { "spearman": [ { "Model": "e5-mistral-7b-instruct", "SummEvalFr": 32.22 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "e5-mistral-7b-instruct", "Core17InstructionRetrieval": 0.09, "News21InstructionRetrieval": -0.86, "Robust04InstructionRetrieval": -9.59 } ] } }, "e5-large": { "BitextMining": { "f1": [ { "Model": "e5-large", "BornholmBitextMining": 40.15 } ] }, "Classification": { "accuracy": [ { "Model": "e5-large", "AngryTweetsClassification": 46.14, "DKHateClassification": 58.72, "DanishPoliticalCommentsClassification": 28.67, "LccSentimentClassification": 42.13, "MassiveIntentClassification (da)": 42.29, "MassiveIntentClassification (nb)": 40.63, "MassiveIntentClassification (sv)": 40.69, 
"MassiveScenarioClassification (da)": 52.95, "MassiveScenarioClassification (nb)": 51.91, "MassiveScenarioClassification (sv)": 50.97, "NoRecClassification": 41.83, "NordicLangClassification": 58.3, "NorwegianParliament": 57.26, "ScalaDaClassification": 49.9, "ScalaNbClassification": 50.13 } ] }, "Clustering": { "v_measure": [ { "Model": "e5-large" } ] }, "PairClassification": { "ap": [ { "Model": "e5-large" } ] }, "Reranking": { "map": [ { "Model": "e5-large" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "e5-large" } ] }, "STS": { "spearman": [ { "Model": "e5-large" } ] }, "Summarization": { "spearman": [ { "Model": "e5-large" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "e5-large" } ] } }, "distilrubert-small-cased-conversational": { "BitextMining": { "f1": [ { "Model": "distilrubert-small-cased-conversational", "Tatoeba (rus-Cyrl_eng-Latn)": 24.16 } ] }, "Classification": { "accuracy": [ { "Model": "distilrubert-small-cased-conversational", "GeoreviewClassification (rus-Cyrl)": 38.95, "HeadlineClassification (rus-Cyrl)": 75.59, "InappropriatenessClassification (rus-Cyrl)": 60.68, "KinopoiskClassification (rus-Cyrl)": 49.67, "MassiveIntentClassification (rus-Cyrl)": 63.12, "MassiveScenarioClassification (rus-Cyrl)": 68.08, "RuReviewsClassification (rus-Cyrl)": 54.05, "RuSciBenchGRNTIClassification (rus-Cyrl)": 48.53, "RuSciBenchOECDClassification (rus-Cyrl)": 37.65 } ] }, "Clustering": { "v_measure": [ { "Model": "distilrubert-small-cased-conversational", "GeoreviewClusteringP2P (rus-Cyrl)": 43.26, "MLSUMClusteringP2P (rus-Cyrl)": 50.08, "MLSUMClusteringS2S (rus-Cyrl)": 51.12, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 37.84, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 34.12 } ] }, "PairClassification": { "ap": [ { "Model": "distilrubert-small-cased-conversational", "OpusparcusPC (rus-Cyrl)": 84.35, "TERRa (rus-Cyrl)": 52.48 } ] }, "Reranking": { "map": [ { "Model": "distilrubert-small-cased-conversational", "RuBQReranking (rus-Cyrl)": 42.58 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "distilrubert-small-cased-conversational", "RiaNewsRetrieval (rus-Cyrl)": 4.14, "RuBQRetrieval (rus-Cyrl)": 10.6 } ] }, "STS": { "spearman": [ { "Model": "distilrubert-small-cased-conversational", "RUParaPhraserSTS (rus-Cyrl)": 55.01, "RuSTSBenchmarkSTS (rus-Cyrl)": 61.72, "STS22 (rus-Cyrl)": 51.87, "STSBenchmarkMultilingualSTS (rus-Cyrl)": 61.6 } ] }, "Summarization": { "spearman": [ { "Model": "distilrubert-small-cased-conversational" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "distilrubert-small-cased-conversational" } ] } }, "gtr-t5-large": { "BitextMining": { "f1": [ { "Model": "gtr-t5-large" } ] }, "Classification": { "accuracy": [ { "Model": "gtr-t5-large", "AmazonCounterfactualClassification (de)": 59.38, "AmazonCounterfactualClassification (en)": 70.03, "AmazonCounterfactualClassification (en-ext)": 69.86, "AmazonCounterfactualClassification (ja)": 45.87, "AmazonPolarityClassification": 73.92, "AmazonReviewsClassification (de)": 33.06, "AmazonReviewsClassification (en)": 37.21, "AmazonReviewsClassification (es)": 34.0, "AmazonReviewsClassification (fr)": 33.48, "AmazonReviewsClassification (ja)": 21.78, "AmazonReviewsClassification (zh)": 21.83, "Banking77Classification": 81.21, "EmotionClassification": 46.33, "ImdbClassification": 70.86, "MTOPDomainClassification (de)": 81.91, "MTOPDomainClassification (en)": 94.01, "MTOPDomainClassification (es)": 84.7, "MTOPDomainClassification (fr)": 82.48, "MTOPDomainClassification (hi)": 22.11, "MTOPDomainClassification (th)": 
16.36, "MTOPIntentClassification (de)": 52.13, "MTOPIntentClassification (en)": 63.86, "MTOPIntentClassification (es)": 52.62, "MTOPIntentClassification (fr)": 46.39, "MTOPIntentClassification (hi)": 3.9, "MTOPIntentClassification (th)": 5.38, "MassiveIntentClassification (af)": 41.02, "MassiveIntentClassification (am)": 2.34, "MassiveIntentClassification (ar)": 4.87, "MassiveIntentClassification (az)": 34.92, "MassiveIntentClassification (bn)": 2.52, "MassiveIntentClassification (cy)": 35.87, "MassiveIntentClassification (da)": 45.3, "MassiveIntentClassification (de)": 51.48, "MassiveIntentClassification (el)": 10.0, "MassiveIntentClassification (en)": 70.06, "MassiveIntentClassification (es)": 53.3, "MassiveIntentClassification (fa)": 3.59, "MassiveIntentClassification (fi)": 37.35, "MassiveIntentClassification (fr)": 54.83, "MassiveIntentClassification (he)": 2.52, "MassiveIntentClassification (hi)": 2.88, "MassiveIntentClassification (hu)": 33.52, "MassiveIntentClassification (hy)": 3.13, "MassiveIntentClassification (id)": 40.11, "MassiveIntentClassification (is)": 34.77, "MassiveIntentClassification (it)": 51.21, "MassiveIntentClassification (ja)": 4.75, "MassiveIntentClassification (jv)": 35.6, "MassiveIntentClassification (ka)": 2.71, "MassiveIntentClassification (km)": 5.48, "MassiveIntentClassification (kn)": 2.44, "MassiveIntentClassification (ko)": 2.59, "MassiveIntentClassification (lv)": 38.15, "MassiveIntentClassification (ml)": 2.67, "MassiveIntentClassification (mn)": 18.47, "MassiveIntentClassification (ms)": 35.58, "MassiveIntentClassification (my)": 4.35, "MassiveIntentClassification (nb)": 43.78, "MassiveIntentClassification (nl)": 45.96, "MassiveIntentClassification (pl)": 39.08, "MassiveIntentClassification (pt)": 52.27, "MassiveIntentClassification (ro)": 46.39, "MassiveIntentClassification (ru)": 16.82, "MassiveIntentClassification (sl)": 37.3, "MassiveIntentClassification (sq)": 41.73, "MassiveIntentClassification (sv)": 43.51, "MassiveIntentClassification (sw)": 35.97, "MassiveIntentClassification (ta)": 1.52, "MassiveIntentClassification (te)": 2.57, "MassiveIntentClassification (th)": 3.94, "MassiveIntentClassification (tl)": 41.03, "MassiveIntentClassification (tr)": 33.75, "MassiveIntentClassification (ur)": 2.57, "MassiveIntentClassification (vi)": 25.23, "MassiveIntentClassification (zh-CN)": 2.41, "MassiveIntentClassification (zh-TW)": 4.64, "MassiveScenarioClassification (af)": 51.48, "MassiveScenarioClassification (am)": 7.74, "MassiveScenarioClassification (ar)": 12.03, "MassiveScenarioClassification (az)": 41.77, "MassiveScenarioClassification (bn)": 8.07, "MassiveScenarioClassification (cy)": 43.67, "MassiveScenarioClassification (da)": 54.88, "MassiveScenarioClassification (de)": 63.63, "MassiveScenarioClassification (el)": 16.83, "MassiveScenarioClassification (en)": 75.49, "MassiveScenarioClassification (es)": 61.48, "MassiveScenarioClassification (fa)": 6.48, "MassiveScenarioClassification (fi)": 43.54, "MassiveScenarioClassification (fr)": 64.06, "MassiveScenarioClassification (he)": 8.03, "MassiveScenarioClassification (hi)": 7.5, "MassiveScenarioClassification (hu)": 42.59, "MassiveScenarioClassification (hy)": 9.22, "MassiveScenarioClassification (id)": 48.67, "MassiveScenarioClassification (is)": 43.87, "MassiveScenarioClassification (it)": 59.83, "MassiveScenarioClassification (ja)": 5.62, "MassiveScenarioClassification (jv)": 42.18, "MassiveScenarioClassification (ka)": 7.52, "MassiveScenarioClassification (km)": 9.55, 
"MassiveScenarioClassification (kn)": 8.34, "MassiveScenarioClassification (ko)": 6.11, "MassiveScenarioClassification (lv)": 43.35, "MassiveScenarioClassification (ml)": 7.28, "MassiveScenarioClassification (mn)": 23.94, "MassiveScenarioClassification (ms)": 45.18, "MassiveScenarioClassification (my)": 9.33, "MassiveScenarioClassification (nb)": 52.71, "MassiveScenarioClassification (nl)": 57.02, "MassiveScenarioClassification (pl)": 46.79, "MassiveScenarioClassification (pt)": 59.45, "MassiveScenarioClassification (ro)": 56.8, "MassiveScenarioClassification (ru)": 25.85, "MassiveScenarioClassification (sl)": 42.51, "MassiveScenarioClassification (sq)": 50.41, "MassiveScenarioClassification (sv)": 54.16, "MassiveScenarioClassification (sw)": 43.02, "MassiveScenarioClassification (ta)": 7.21, "MassiveScenarioClassification (te)": 6.9, "MassiveScenarioClassification (th)": 8.7, "MassiveScenarioClassification (tl)": 51.76, "MassiveScenarioClassification (tr)": 42.54, "MassiveScenarioClassification (ur)": 9.32, "MassiveScenarioClassification (vi)": 31.51, "MassiveScenarioClassification (zh-CN)": 3.84, "MassiveScenarioClassification (zh-TW)": 8.16, "ToxicConversationsClassification": 68.65, "TweetSentimentExtractionClassification": 54.09 } ] }, "Clustering": { "v_measure": [ { "Model": "gtr-t5-large", "ArxivClusteringP2P": 37.5, "ArxivClusteringS2S": 30.55, "BiorxivClusteringP2P": 29.59, "BiorxivClusteringS2S": 25.72, "MedrxivClusteringP2P": 28.72, "MedrxivClusteringS2S": 27.39, "RedditClustering": 61.69, "RedditClusteringP2P": 61.67, "StackExchangeClustering": 69.93, "StackExchangeClusteringP2P": 33.21, "TwentyNewsgroupsClustering": 51.64 } ] }, "PairClassification": { "ap": [ { "Model": "gtr-t5-large", "SprintDuplicateQuestions": 95.05, "TwitterSemEval2015": 76.03, "TwitterURLCorpus": 84.89 } ] }, "Reranking": { "map": [ { "Model": "gtr-t5-large", "AskUbuntuDupQuestions": 61.64, "MindSmallReranking": 31.84, "SciDocsRR": 76.39, "StackOverflowDupQuestions": 51.58 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "gtr-t5-large", "ArguAna": 52.09, "CQADupstackRetrieval": 36.62, "ClimateFEVER": 26.9, "DBPedia": 39.55, "FEVER": 72.66, "FiQA2018": 42.79, "HotpotQA": 57.85, "MSMARCO": 42.73, "NFCorpus": 32.63, "NQ": 55.09, "QuoraRetrieval": 88.47, "SCIDOCS": 15.51, "SciFact": 63.42, "TRECCOVID": 56.68, "Touche2020": 28.29 } ] }, "STS": { "spearman": [ { "Model": "gtr-t5-large", "BIOSSES": 84.86, "SICK-R": 73.39, "STS12": 70.33, "STS13": 82.19, "STS14": 77.16, "STS15": 86.31, "STS16": 81.85, "STS17 (ar-ar)": 10.19, "STS17 (en-ar)": -5.77, "STS17 (en-de)": 67.43, "STS17 (en-en)": 83.93, "STS17 (en-tr)": 8.75, "STS17 (es-en)": 54.96, "STS17 (es-es)": 82.74, "STS17 (fr-en)": 60.5, "STS17 (it-en)": 46.26, "STS17 (ko-ko)": 8.96, "STS17 (nl-en)": 47.48, "STS22 (ar)": 34.97, "STS22 (de)": 51.7, "STS22 (de-en)": 48.76, "STS22 (de-fr)": 57.5, "STS22 (de-pl)": 32.76, "STS22 (en)": 64.3, "STS22 (es)": 57.49, "STS22 (es-en)": 67.76, "STS22 (es-it)": 57.18, "STS22 (fr)": 78.7, "STS22 (fr-pl)": 61.98, "STS22 (it)": 67.67, "STS22 (pl)": 30.68, "STS22 (pl-en)": 54.17, "STS22 (ru)": 15.36, "STS22 (tr)": 58.12, "STS22 (zh)": 27.32, "STS22 (zh-en)": 29.42, "STSBenchmark": 77.6 } ] }, "Summarization": { "spearman": [ { "Model": "gtr-t5-large", "SummEval": 29.5 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "gtr-t5-large" } ] } }, "allenai-specter": { "BitextMining": { "f1": [ { "Model": "allenai-specter" } ] }, "Classification": { "accuracy": [ { "Model": "allenai-specter", "AmazonCounterfactualClassification 
(de)": 54.46, "AmazonCounterfactualClassification (en)": 58.7, "AmazonCounterfactualClassification (en-ext)": 59.28, "AmazonCounterfactualClassification (ja)": 43.87, "AmazonPolarityClassification": 57.77, "AmazonReviewsClassification (de)": 24.08, "AmazonReviewsClassification (en)": 26.26, "AmazonReviewsClassification (es)": 23.88, "AmazonReviewsClassification (fr)": 23.31, "AmazonReviewsClassification (ja)": 20.25, "AmazonReviewsClassification (zh)": 20.49, "Banking77Classification": 66.66, "EmotionClassification": 24.82, "ImdbClassification": 56.35, "MTOPDomainClassification (de)": 48.55, "MTOPDomainClassification (en)": 74.53, "MTOPDomainClassification (es)": 58.39, "MTOPDomainClassification (fr)": 54.61, "MTOPDomainClassification (hi)": 21.22, "MTOPDomainClassification (th)": 14.98, "MTOPIntentClassification (de)": 35.55, "MTOPIntentClassification (en)": 50.05, "MTOPIntentClassification (es)": 36.72, "MTOPIntentClassification (fr)": 34.71, "MTOPIntentClassification (hi)": 4.44, "MTOPIntentClassification (th)": 4.67, "MassiveIntentClassification (af)": 33.68, "MassiveIntentClassification (am)": 2.94, "MassiveIntentClassification (ar)": 10.04, "MassiveIntentClassification (az)": 30.74, "MassiveIntentClassification (bn)": 3.02, "MassiveIntentClassification (cy)": 33.94, "MassiveIntentClassification (da)": 38.47, "MassiveIntentClassification (de)": 36.06, "MassiveIntentClassification (el)": 27.7, "MassiveIntentClassification (en)": 51.73, "MassiveIntentClassification (es)": 35.6, "MassiveIntentClassification (fa)": 17.97, "MassiveIntentClassification (fi)": 35.53, "MassiveIntentClassification (fr)": 38.41, "MassiveIntentClassification (he)": 2.69, "MassiveIntentClassification (hi)": 3.43, "MassiveIntentClassification (hu)": 34.05, "MassiveIntentClassification (hy)": 3.11, "MassiveIntentClassification (id)": 40.02, "MassiveIntentClassification (is)": 32.63, "MassiveIntentClassification (it)": 39.28, "MassiveIntentClassification (ja)": 4.95, "MassiveIntentClassification (jv)": 34.95, "MassiveIntentClassification (ka)": 2.57, "MassiveIntentClassification (km)": 4.73, "MassiveIntentClassification (kn)": 3.54, "MassiveIntentClassification (ko)": 2.68, "MassiveIntentClassification (lv)": 37.91, "MassiveIntentClassification (ml)": 2.88, "MassiveIntentClassification (mn)": 16.94, "MassiveIntentClassification (ms)": 36.6, "MassiveIntentClassification (my)": 3.96, "MassiveIntentClassification (nb)": 34.75, "MassiveIntentClassification (nl)": 33.95, "MassiveIntentClassification (pl)": 35.77, "MassiveIntentClassification (pt)": 43.05, "MassiveIntentClassification (ro)": 36.2, "MassiveIntentClassification (ru)": 25.3, "MassiveIntentClassification (sl)": 35.9, "MassiveIntentClassification (sq)": 36.6, "MassiveIntentClassification (sv)": 36.0, "MassiveIntentClassification (sw)": 34.81, "MassiveIntentClassification (ta)": 3.11, "MassiveIntentClassification (te)": 2.53, "MassiveIntentClassification (th)": 4.38, "MassiveIntentClassification (tl)": 35.51, "MassiveIntentClassification (tr)": 32.02, "MassiveIntentClassification (ur)": 9.61, "MassiveIntentClassification (vi)": 37.07, "MassiveIntentClassification (zh-CN)": 2.81, "MassiveIntentClassification (zh-TW)": 4.79, "MassiveScenarioClassification (af)": 36.17, "MassiveScenarioClassification (am)": 7.64, "MassiveScenarioClassification (ar)": 15.26, "MassiveScenarioClassification (az)": 30.73, "MassiveScenarioClassification (bn)": 7.15, "MassiveScenarioClassification (cy)": 34.73, "MassiveScenarioClassification (da)": 39.93, "MassiveScenarioClassification 
(de)": 38.62, "MassiveScenarioClassification (el)": 27.18, "MassiveScenarioClassification (en)": 58.58, "MassiveScenarioClassification (es)": 39.44, "MassiveScenarioClassification (fa)": 21.43, "MassiveScenarioClassification (fi)": 33.21, "MassiveScenarioClassification (fr)": 40.26, "MassiveScenarioClassification (he)": 7.42, "MassiveScenarioClassification (hi)": 8.06, "MassiveScenarioClassification (hu)": 34.54, "MassiveScenarioClassification (hy)": 8.61, "MassiveScenarioClassification (id)": 40.04, "MassiveScenarioClassification (is)": 33.57, "MassiveScenarioClassification (it)": 40.1, "MassiveScenarioClassification (ja)": 9.96, "MassiveScenarioClassification (jv)": 36.11, "MassiveScenarioClassification (ka)": 7.13, "MassiveScenarioClassification (km)": 9.66, "MassiveScenarioClassification (kn)": 7.55, "MassiveScenarioClassification (ko)": 7.27, "MassiveScenarioClassification (lv)": 37.03, "MassiveScenarioClassification (ml)": 7.22, "MassiveScenarioClassification (mn)": 21.53, "MassiveScenarioClassification (ms)": 37.57, "MassiveScenarioClassification (my)": 9.54, "MassiveScenarioClassification (nb)": 35.71, "MassiveScenarioClassification (nl)": 34.62, "MassiveScenarioClassification (pl)": 36.87, "MassiveScenarioClassification (pt)": 44.68, "MassiveScenarioClassification (ro)": 37.29, "MassiveScenarioClassification (ru)": 28.16, "MassiveScenarioClassification (sl)": 37.95, "MassiveScenarioClassification (sq)": 37.82, "MassiveScenarioClassification (sv)": 35.35, "MassiveScenarioClassification (sw)": 35.37, "MassiveScenarioClassification (ta)": 7.19, "MassiveScenarioClassification (te)": 7.29, "MassiveScenarioClassification (th)": 9.47, "MassiveScenarioClassification (tl)": 37.31, "MassiveScenarioClassification (tr)": 34.57, "MassiveScenarioClassification (ur)": 16.17, "MassiveScenarioClassification (vi)": 35.91, "MassiveScenarioClassification (zh-CN)": 9.19, "MassiveScenarioClassification (zh-TW)": 10.19, "ToxicConversationsClassification": 57.44, "TweetSentimentExtractionClassification": 45.52 } ] }, "Clustering": { "v_measure": [ { "Model": "allenai-specter", "ArxivClusteringP2P": 44.75, "ArxivClusteringS2S": 35.27, "BiorxivClusteringP2P": 39.52, "BiorxivClusteringS2S": 34.53, "MedrxivClusteringP2P": 35.04, "MedrxivClusteringS2S": 31.66, "RedditClustering": 24.13, "RedditClusteringP2P": 35.06, "StackExchangeClustering": 39.01, "StackExchangeClusteringP2P": 31.46, "TwentyNewsgroupsClustering": 24.22 } ] }, "PairClassification": { "ap": [ { "Model": "allenai-specter", "SprintDuplicateQuestions": 71.63, "TwitterSemEval2015": 43.25, "TwitterURLCorpus": 69.22 } ] }, "Reranking": { "map": [ { "Model": "allenai-specter", "AskUbuntuDupQuestions": 50.07, "MindSmallReranking": 24.8, "SciDocsRR": 81.31, "StackOverflowDupQuestions": 36.22 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "allenai-specter", "ArguAna": 32.67, "CQADupstackRetrieval": 14.6, "ClimateFEVER": 6.86, "DBPedia": 4.14, "FEVER": 5.45, "FiQA2018": 5.64, "HotpotQA": 5.46, "MSMARCO": 5.59, "NFCorpus": 0.85, "NQ": 5.99, "QuoraRetrieval": 64.65, "SCIDOCS": 0.0, "SciFact": 47.88, "TRECCOVID": 29.91, "Touche2020": 8.46 } ] }, "STS": { "spearman": [ { "Model": "allenai-specter", "BIOSSES": 64.95, "SICK-R": 56.39, "STS12": 62.49, "STS13": 58.7, "STS14": 54.87, "STS15": 62.54, "STS16": 64.27, "STS17 (ar-ar)": 27.14, "STS17 (en-ar)": 6.9, "STS17 (en-de)": 11.59, "STS17 (en-en)": 69.63, "STS17 (en-tr)": 6.46, "STS17 (es-en)": 10.86, "STS17 (es-es)": 55.45, "STS17 (fr-en)": 16.02, "STS17 (it-en)": 19.87, "STS17 (ko-ko)": 8.08, "STS17 
(nl-en)": 24.92, "STS22 (ar)": 19.57, "STS22 (de)": 17.31, "STS22 (de-en)": 26.03, "STS22 (de-fr)": 10.26, "STS22 (de-pl)": 16.94, "STS22 (en)": 55.06, "STS22 (es)": 48.89, "STS22 (es-en)": 51.79, "STS22 (es-it)": 25.24, "STS22 (fr)": 53.92, "STS22 (fr-pl)": 39.44, "STS22 (it)": 39.43, "STS22 (pl)": 13.56, "STS22 (pl-en)": 25.36, "STS22 (ru)": 1.11, "STS22 (tr)": 31.73, "STS22 (zh)": 16.35, "STS22 (zh-en)": 8.44, "STSBenchmark": 61.26 } ] }, "Summarization": { "spearman": [ { "Model": "allenai-specter", "SummEval": 27.66 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "allenai-specter" } ] } }, "bert-base-multilingual-uncased": { "BitextMining": { "f1": [ { "Model": "bert-base-multilingual-uncased" } ] }, "Classification": { "accuracy": [ { "Model": "bert-base-multilingual-uncased", "AmazonReviewsClassification (fr)": 29.02, "MTOPDomainClassification (fr)": 64.49, "MTOPIntentClassification (fr)": 39.4, "MasakhaNEWSClassification (fra)": 75.69, "MassiveIntentClassification (fr)": 38.01, "MassiveScenarioClassification (fr)": 43.63 } ] }, "Clustering": { "v_measure": [ { "Model": "bert-base-multilingual-uncased", "AlloProfClusteringP2P": 60.66, "AlloProfClusteringS2S": 35.05, "HALClusteringS2S": 20.9, "MLSUMClusteringP2P": 43.5, "MLSUMClusteringS2S": 30.99, "MasakhaNEWSClusteringP2P (fra)": 49.71, "MasakhaNEWSClusteringS2S (fra)": 42.23 } ] }, "PairClassification": { "ap": [ { "Model": "bert-base-multilingual-uncased", "OpusparcusPC (fr)": 87.43, "PawsXPairClassification (fr)": 53.22 } ] }, "Reranking": { "map": [ { "Model": "bert-base-multilingual-uncased", "AlloprofReranking": 38.85, "SyntecReranking": 66.4 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "bert-base-multilingual-uncased", "AlloprofRetrieval": 5.51, "BSARDRetrieval": 0.0, "MintakaRetrieval (fr)": 2.87, "SyntecRetrieval": 34.95, "XPQARetrieval (fr)": 26.12 } ] }, "STS": { "spearman": [ { "Model": "bert-base-multilingual-uncased", "SICKFr": 58.26, "STS22 (fr)": 56.47, "STSBenchmarkMultilingualSTS (fr)": 54.97 } ] }, "Summarization": { "spearman": [ { "Model": "bert-base-multilingual-uncased", "SummEvalFr": 30.72 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "bert-base-multilingual-uncased" } ] } }, "bge-m3": { "BitextMining": { "f1": [ { "Model": "bge-m3", "Tatoeba (rus-Cyrl_eng-Latn)": 93.42 } ] }, "Classification": { "accuracy": [ { "Model": "bge-m3", "GeoreviewClassification (rus-Cyrl)": 48.27, "HeadlineClassification (rus-Cyrl)": 70.32, "InappropriatenessClassification (rus-Cyrl)": 59.87, "KinopoiskClassification (rus-Cyrl)": 58.23, "MassiveIntentClassification (rus-Cyrl)": 68.75, "MassiveScenarioClassification (rus-Cyrl)": 73.42, "RuReviewsClassification (rus-Cyrl)": 66.91, "RuSciBenchGRNTIClassification (rus-Cyrl)": 55.81, "RuSciBenchOECDClassification (rus-Cyrl)": 42.57 } ] }, "Clustering": { "v_measure": [ { "Model": "bge-m3", "GeoreviewClusteringP2P (rus-Cyrl)": 63.75, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 50.57, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 43.21 } ] }, "PairClassification": { "ap": [ { "Model": "bge-m3", "OpusparcusPC (rus-Cyrl)": 89.64, "TERRa (rus-Cyrl)": 60.6 } ] }, "Reranking": { "map": [ { "Model": "bge-m3", "RuBQReranking (rus-Cyrl)": 74.02 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "bge-m3", "ARCChallenge": 9.02, "AlphaNLI": 24.73, "HellaSwag": 25.67, "LEMBNarrativeQARetrieval": 45.76, "LEMBNeedleRetrieval": 40.25, "LEMBPasskeyRetrieval": 46.0, "LEMBQMSumRetrieval": 35.54, "LEMBSummScreenFDRetrieval": 94.09, "LEMBWikimQARetrieval": 77.73, "PIQA": 22.93, "Quail": 7.51, 
"RARbCode": 38.8, "RARbMath": 69.19, "RiaNewsRetrieval (rus-Cyrl)": 82.98, "RuBQRetrieval (rus-Cyrl)": 71.21, "SIQA": 4.89, "SpartQA": 7.49, "TempReasonL1": 0.99, "TempReasonL2Fact": 33.23, "TempReasonL2Pure": 0.68, "TempReasonL3Fact": 30.05, "TempReasonL3Pure": 5.28, "WinoGrande": 41.72 } ] }, "STS": { "spearman": [ { "Model": "bge-m3", "RUParaPhraserSTS (rus-Cyrl)": 74.9, "RuSTSBenchmarkSTS (rus-Cyrl)": 79.87, "STSBenchmarkMultilingualSTS (rus-Cyrl)": 79.27 } ] }, "Summarization": { "spearman": [ { "Model": "bge-m3" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "bge-m3" } ] } }, "sentence-camembert-base": { "BitextMining": { "f1": [ { "Model": "sentence-camembert-base" } ] }, "Classification": { "accuracy": [ { "Model": "sentence-camembert-base", "AmazonReviewsClassification (fr)": 36.03, "MTOPDomainClassification (fr)": 77.1, "MTOPIntentClassification (fr)": 43.44, "MasakhaNEWSClassification (fra)": 70.36, "MassiveIntentClassification (fr)": 51.59, "MassiveScenarioClassification (fr)": 61.28 } ] }, "Clustering": { "v_measure": [ { "Model": "sentence-camembert-base", "AlloProfClusteringP2P": 59.09, "AlloProfClusteringS2S": 38.92, "HALClusteringS2S": 20.22, "MLSUMClusteringP2P": 35.98, "MLSUMClusteringS2S": 27.05, "MasakhaNEWSClusteringP2P (fra)": 36.03, "MasakhaNEWSClusteringS2S (fra)": 30.77 } ] }, "PairClassification": { "ap": [ { "Model": "sentence-camembert-base", "OpusparcusPC (fr)": 92.05, "PawsXPairClassification (fr)": 57.44 } ] }, "Reranking": { "map": [ { "Model": "sentence-camembert-base", "AlloprofReranking": 48.68, "SyntecReranking": 79.75 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "sentence-camembert-base", "AlloprofRetrieval": 21.94, "BSARDRetrieval": 0.0, "MintakaRetrieval (fr)": 13.36, "SyntecRetrieval": 68.62, "XPQARetrieval (fr)": 57.92 } ] }, "STS": { "spearman": [ { "Model": "sentence-camembert-base", "SICKFr": 74.18, "STS22 (fr)": 77.54, "STSBenchmarkMultilingualSTS (fr)": 81.64 } ] }, "Summarization": { "spearman": [ { "Model": "sentence-camembert-base", "SummEvalFr": 28.77 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "sentence-camembert-base" } ] } }, "all-mpnet-base-v2": { "BitextMining": { "f1": [ { "Model": "all-mpnet-base-v2", "BornholmBitextMining (dan-Latn)": 27.44, "Tatoeba (pol-Latn_eng-Latn)": 4.09, "Tatoeba (ita-Latn_eng-Latn)": 11.1, "Tatoeba (cat-Latn_eng-Latn)": 9.44, "Tatoeba (aze-Latn_eng-Latn)": 1.49, "Tatoeba (eus-Latn_eng-Latn)": 3.94, "Tatoeba (epo-Latn_eng-Latn)": 7.15, "Tatoeba (lit-Latn_eng-Latn)": 1.02, "Tatoeba (ast-Latn_eng-Latn)": 9.78, "Tatoeba (bul-Cyrl_eng-Latn)": 0.35, "Tatoeba (ceb-Latn_eng-Latn)": 4.41, "Tatoeba (mkd-Cyrl_eng-Latn)": 0.0, "Tatoeba (tzl-Latn_eng-Latn)": 3.55, "Tatoeba (zsm-Latn_eng-Latn)": 4.75, "Tatoeba (mhr-Cyrl_eng-Latn)": 0.17, "Tatoeba (pam-Latn_eng-Latn)": 4.32, "Tatoeba (amh-Ethi_eng-Latn)": 0.0, "Tatoeba (slv-Latn_eng-Latn)": 3.73, "Tatoeba (lvs-Latn_eng-Latn)": 2.98, "Tatoeba (sqi-Latn_eng-Latn)": 3.45, "Tatoeba (orv-Cyrl_eng-Latn)": 0.0, "Tatoeba (vie-Latn_eng-Latn)": 4.96, "Tatoeba (pes-Arab_eng-Latn)": 0.2, "Tatoeba (por-Latn_eng-Latn)": 10.48, "Tatoeba (dtp-Latn_eng-Latn)": 3.54, "Tatoeba (yid-Hebr_eng-Latn)": 0.08, "Tatoeba (isl-Latn_eng-Latn)": 3.86, "Tatoeba (cha-Latn_eng-Latn)": 12.2, "Tatoeba (ron-Latn_eng-Latn)": 7.34, "Tatoeba (hye-Armn_eng-Latn)": 0.14, "Tatoeba (mar-Deva_eng-Latn)": 0.11, "Tatoeba (hin-Deva_eng-Latn)": 0.02, "Tatoeba (kor-Hang_eng-Latn)": 0.32, "Tatoeba (srp-Cyrl_eng-Latn)": 1.89, "Tatoeba (csb-Latn_eng-Latn)": 4.19, "Tatoeba (jpn-Jpan_eng-Latn)": 1.71, 
"Tatoeba (ber-Tfng_eng-Latn)": 4.56, "Tatoeba (wuu-Hans_eng-Latn)": 0.91, "Tatoeba (jav-Latn_eng-Latn)": 3.17, "Tatoeba (nob-Latn_eng-Latn)": 4.37, "Tatoeba (bre-Latn_eng-Latn)": 3.65, "Tatoeba (kzj-Latn_eng-Latn)": 3.62, "Tatoeba (urd-Arab_eng-Latn)": 0.0, "Tatoeba (ces-Latn_eng-Latn)": 3.56, "Tatoeba (cbk-Latn_eng-Latn)": 9.33, "Tatoeba (gla-Latn_eng-Latn)": 2.04, "Tatoeba (war-Latn_eng-Latn)": 5.14, "Tatoeba (swh-Latn_eng-Latn)": 6.01, "Tatoeba (swg-Latn_eng-Latn)": 7.86, "Tatoeba (glg-Latn_eng-Latn)": 12.0, "Tatoeba (fao-Latn_eng-Latn)": 7.08, "Tatoeba (gsw-Latn_eng-Latn)": 10.67, "Tatoeba (rus-Cyrl_eng-Latn)": 0.14, "Tatoeba (kaz-Cyrl_eng-Latn)": 0.52, "Tatoeba (gle-Latn_eng-Latn)": 2.19, "Tatoeba (slk-Latn_eng-Latn)": 3.4, "Tatoeba (nno-Latn_eng-Latn)": 5.75, "Tatoeba (cor-Latn_eng-Latn)": 2.42, "Tatoeba (nov-Latn_eng-Latn)": 16.61, "Tatoeba (swe-Latn_eng-Latn)": 6.55, "Tatoeba (max-Deva_eng-Latn)": 6.46, "Tatoeba (oci-Latn_eng-Latn)": 8.57, "Tatoeba (lfn-Latn_eng-Latn)": 6.1, "Tatoeba (fra-Latn_eng-Latn)": 16.9, "Tatoeba (ben-Beng_eng-Latn)": 0.0, "Tatoeba (bel-Cyrl_eng-Latn)": 0.65, "Tatoeba (lat-Latn_eng-Latn)": 5.78, "Tatoeba (cmn-Hans_eng-Latn)": 2.22, "Tatoeba (kat-Geor_eng-Latn)": 0.43, "Tatoeba (bos-Latn_eng-Latn)": 4.6, "Tatoeba (xho-Latn_eng-Latn)": 3.3, "Tatoeba (tha-Thai_eng-Latn)": 0.0, "Tatoeba (cym-Latn_eng-Latn)": 4.88, "Tatoeba (deu-Latn_eng-Latn)": 11.46, "Tatoeba (awa-Deva_eng-Latn)": 0.44, "Tatoeba (ido-Latn_eng-Latn)": 9.84, "Tatoeba (tat-Cyrl_eng-Latn)": 0.24, "Tatoeba (kab-Latn_eng-Latn)": 1.31, "Tatoeba (uzb-Latn_eng-Latn)": 1.98, "Tatoeba (heb-Hebr_eng-Latn)": 0.28, "Tatoeba (ara-Arab_eng-Latn)": 0.1, "Tatoeba (fry-Latn_eng-Latn)": 12.43, "Tatoeba (afr-Latn_eng-Latn)": 6.08, "Tatoeba (kur-Latn_eng-Latn)": 3.65, "Tatoeba (pms-Latn_eng-Latn)": 7.63, "Tatoeba (ell-Grek_eng-Latn)": 0.0, "Tatoeba (spa-Latn_eng-Latn)": 10.12, "Tatoeba (dsb-Latn_eng-Latn)": 2.96, "Tatoeba (uig-Arab_eng-Latn)": 0.33, "Tatoeba (nld-Latn_eng-Latn)": 9.29, "Tatoeba (tel-Telu_eng-Latn)": 0.73, "Tatoeba (hrv-Latn_eng-Latn)": 3.77, "Tatoeba (nds-Latn_eng-Latn)": 10.96, "Tatoeba (hun-Latn_eng-Latn)": 3.23, "Tatoeba (est-Latn_eng-Latn)": 2.35, "Tatoeba (mal-Mlym_eng-Latn)": 0.15, "Tatoeba (khm-Khmr_eng-Latn)": 0.28, "Tatoeba (hsb-Latn_eng-Latn)": 3.12, "Tatoeba (tgl-Latn_eng-Latn)": 4.06, "Tatoeba (ang-Latn_eng-Latn)": 9.77, "Tatoeba (tur-Latn_eng-Latn)": 3.16, "Tatoeba (tuk-Latn_eng-Latn)": 2.23, "Tatoeba (ile-Latn_eng-Latn)": 17.84, "Tatoeba (mon-Cyrl_eng-Latn)": 0.81, "Tatoeba (yue-Hant_eng-Latn)": 1.16, "Tatoeba (ina-Latn_eng-Latn)": 22.55, "Tatoeba (tam-Taml_eng-Latn)": 0.73, "Tatoeba (ukr-Cyrl_eng-Latn)": 0.5, "Tatoeba (dan-Latn_eng-Latn)": 10.01, "Tatoeba (arq-Arab_eng-Latn)": 0.33, "Tatoeba (arz-Arab_eng-Latn)": 0.0, "Tatoeba (fin-Latn_eng-Latn)": 3.82, "Tatoeba (ind-Latn_eng-Latn)": 4.88 } ] }, "Classification": { "accuracy": [ { "Model": "all-mpnet-base-v2", "AllegroReviews (pol-Latn)": 22.99, "AmazonCounterfactualClassification (en-ext)": 67.5, "AmazonCounterfactualClassification (en)": 65.03, "AmazonCounterfactualClassification (deu-Latn)": 55.66, "AmazonCounterfactualClassification (jpn-Jpan)": 60.69, "AmazonPolarityClassification": 67.14, "AmazonReviewsClassification (en)": 31.44, "AmazonReviewsClassification (deu-Latn)": 26.05, "AmazonReviewsClassification (spa-Latn)": 27.73, "AmazonReviewsClassification (fra-Latn)": 28.49, "AmazonReviewsClassification (jpn-Jpan)": 23.65, "AmazonReviewsClassification (cmn-Hans)": 23.62, "AngryTweetsClassification (dan-Latn)": 44.13, 
"Banking77Classification": 81.7, "CBD (pol-Latn)": 50.25, "DanishPoliticalCommentsClassification (dan-Latn)": 28.31, "EmotionClassification": 42.22, "GeoreviewClassification (rus-Cyrl)": 25.93, "HeadlineClassification (rus-Cyrl)": 28.53, "IFlyTek (cmn-Hans)": 17.18, "ImdbClassification": 71.17, "InappropriatenessClassification (rus-Cyrl)": 51.82, "JDReview (cmn-Hans)": 60.19, "KinopoiskClassification (rus-Cyrl)": 34.18, "LccSentimentClassification (dan-Latn)": 39.27, "MTOPDomainClassification (en)": 91.89, "MTOPDomainClassification (deu-Latn)": 71.86, "MTOPDomainClassification (spa-Latn)": 71.3, "MTOPDomainClassification (fra-Latn)": 74.88, "MTOPDomainClassification (hin-Deva)": 39.93, "MTOPDomainClassification (tha-Thai)": 17.54, "MTOPIntentClassification (en)": 68.27, "MTOPIntentClassification (deu-Latn)": 44.36, "MTOPIntentClassification (spa-Latn)": 39.48, "MTOPIntentClassification (fra-Latn)": 37.57, "MTOPIntentClassification (hin-Deva)": 18.63, "MTOPIntentClassification (tha-Thai)": 5.42, "MasakhaNEWSClassification (amh-Ethi)": 36.49, "MasakhaNEWSClassification (eng)": 79.75, "MasakhaNEWSClassification (fra-Latn)": 77.77, "MasakhaNEWSClassification (hau-Latn)": 59.22, "MasakhaNEWSClassification (ibo-Latn)": 61.64, "MasakhaNEWSClassification (lin-Latn)": 74.0, "MasakhaNEWSClassification (lug-Latn)": 58.43, "MasakhaNEWSClassification (orm-Ethi)": 48.15, "MasakhaNEWSClassification (pcm-Latn)": 92.2, "MasakhaNEWSClassification (run-Latn)": 64.72, "MasakhaNEWSClassification (sna-Latn)": 73.69, "MasakhaNEWSClassification (som-Latn)": 49.97, "MasakhaNEWSClassification (swa-Latn)": 55.15, "MasakhaNEWSClassification (tir-Ethi)": 27.46, "MasakhaNEWSClassification (xho-Latn)": 60.98, "MasakhaNEWSClassification (yor-Latn)": 63.33, "MassiveIntentClassification (en)": 69.76, "MassiveIntentClassification (jav-Latn)": 31.75, "MassiveIntentClassification (fra-Latn)": 44.27, "MassiveIntentClassification (msa-Latn)": 30.53, "MassiveIntentClassification (hun-Latn)": 34.38, "MassiveIntentClassification (pol-Latn)": 34.26, "MassiveIntentClassification (nld-Latn)": 38.49, "MassiveIntentClassification (tha-Thai)": 8.51, "MassiveIntentClassification (tur-Latn)": 32.02, "MassiveIntentClassification (tam-Taml)": 9.25, "MassiveIntentClassification (hye-Armn)": 10.11, "MassiveIntentClassification (khm-Khmr)": 4.74, "MassiveIntentClassification (lav-Latn)": 35.08, "MassiveIntentClassification (deu-Latn)": 44.54, "MassiveIntentClassification (spa-Latn)": 39.75, "MassiveIntentClassification (ben-Beng)": 12.35, "MassiveIntentClassification (por-Latn)": 42.83, "MassiveIntentClassification (ara-Arab)": 20.42, "MassiveIntentClassification (cym-Latn)": 30.82, "MassiveIntentClassification (dan-Latn)": 42.36, "MassiveIntentClassification (mya-Mymr)": 4.6, "MassiveIntentClassification (heb-Hebr)": 23.6, "MassiveIntentClassification (kan-Knda)": 3.76, "MassiveIntentClassification (swa-Latn)": 31.82, "MassiveIntentClassification (fas-Arab)": 22.45, "MassiveIntentClassification (hin-Deva)": 17.68, "MassiveIntentClassification (kat-Geor)": 7.66, "MassiveIntentClassification (mal-Mlym)": 2.64, "MassiveIntentClassification (fin-Latn)": 34.58, "MassiveIntentClassification (slv-Latn)": 34.49, "MassiveIntentClassification (afr-Latn)": 36.49, "MassiveIntentClassification (urd-Arab)": 12.86, "MassiveIntentClassification (ron-Latn)": 38.07, "MassiveIntentClassification (sqi-Latn)": 37.26, "MassiveIntentClassification (cmo-Hant)": 22.43, "MassiveIntentClassification (ita-Latn)": 40.29, "MassiveIntentClassification (ind-Latn)": 36.31, 
"MassiveIntentClassification (nob-Latn)": 39.3, "MassiveIntentClassification (jpn-Jpan)": 33.13, "MassiveIntentClassification (aze-Latn)": 28.92, "MassiveIntentClassification (mon-Cyrl)": 19.65, "MassiveIntentClassification (ell-Grek)": 24.52, "MassiveIntentClassification (rus-Cyrl)": 23.98, "MassiveIntentClassification (kor-Kore)": 13.35, "MassiveIntentClassification (cmo-Hans)": 24.36, "MassiveIntentClassification (isl-Latn)": 31.46, "MassiveIntentClassification (swe-Latn)": 39.02, "MassiveIntentClassification (tel-Telu)": 2.26, "MassiveIntentClassification (vie-Latn)": 31.47, "MassiveIntentClassification (tgl-Latn)": 36.33, "MassiveIntentClassification (amh-Ethi)": 2.39, "MassiveScenarioClassification (en)": 75.67, "MassiveScenarioClassification (tur-Latn)": 39.11, "MassiveScenarioClassification (kat-Geor)": 13.45, "MassiveScenarioClassification (jpn-Jpan)": 40.57, "MassiveScenarioClassification (spa-Latn)": 50.92, "MassiveScenarioClassification (fas-Arab)": 27.8, "MassiveScenarioClassification (hun-Latn)": 41.01, "MassiveScenarioClassification (jav-Latn)": 40.0, "MassiveScenarioClassification (por-Latn)": 52.06, "MassiveScenarioClassification (sqi-Latn)": 44.67, "MassiveScenarioClassification (lav-Latn)": 39.28, "MassiveScenarioClassification (deu-Latn)": 54.09, "MassiveScenarioClassification (nld-Latn)": 47.79, "MassiveScenarioClassification (mon-Cyrl)": 25.58, "MassiveScenarioClassification (swa-Latn)": 40.34, "MassiveScenarioClassification (ben-Beng)": 17.49, "MassiveScenarioClassification (cym-Latn)": 34.82, "MassiveScenarioClassification (swe-Latn)": 44.53, "MassiveScenarioClassification (rus-Cyrl)": 28.71, "MassiveScenarioClassification (fra-Latn)": 54.26, "MassiveScenarioClassification (dan-Latn)": 49.45, "MassiveScenarioClassification (mya-Mymr)": 10.8, "MassiveScenarioClassification (ron-Latn)": 47.86, "MassiveScenarioClassification (cmo-Hans)": 35.33, "MassiveScenarioClassification (hin-Deva)": 23.13, "MassiveScenarioClassification (cmo-Hant)": 31.7, "MassiveScenarioClassification (afr-Latn)": 43.63, "MassiveScenarioClassification (aze-Latn)": 36.42, "MassiveScenarioClassification (msa-Latn)": 37.28, "MassiveScenarioClassification (ell-Grek)": 33.85, "MassiveScenarioClassification (isl-Latn)": 39.36, "MassiveScenarioClassification (fin-Latn)": 38.41, "MassiveScenarioClassification (ind-Latn)": 43.05, "MassiveScenarioClassification (pol-Latn)": 42.66, "MassiveScenarioClassification (tam-Taml)": 14.55, "MassiveScenarioClassification (ita-Latn)": 51.37, "MassiveScenarioClassification (urd-Arab)": 20.0, "MassiveScenarioClassification (kan-Knda)": 8.34, "MassiveScenarioClassification (tel-Telu)": 7.81, "MassiveScenarioClassification (mal-Mlym)": 7.69, "MassiveScenarioClassification (ara-Arab)": 27.8, "MassiveScenarioClassification (kor-Kore)": 17.28, "MassiveScenarioClassification (vie-Latn)": 35.9, "MassiveScenarioClassification (amh-Ethi)": 7.43, "MassiveScenarioClassification (heb-Hebr)": 25.49, "MassiveScenarioClassification (hye-Armn)": 16.86, "MassiveScenarioClassification (khm-Khmr)": 9.63, "MassiveScenarioClassification (slv-Latn)": 39.88, "MassiveScenarioClassification (tgl-Latn)": 47.04, "MassiveScenarioClassification (nob-Latn)": 45.75, "MassiveScenarioClassification (tha-Thai)": 17.01, "MultilingualSentiment (cmn-Hans)": 41.2, "NoRecClassification (nob-Latn)": 38.34, "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 50.15, "OnlineShopping (cmn-Hans)": 56.94, "PAC (pol-Latn)": 62.1, "PolEmo2.0-IN (pol-Latn)": 41.63, "PolEmo2.0-OUT 
(pol-Latn)": 25.0, "RuReviewsClassification (rus-Cyrl)": 42.33, "RuSciBenchGRNTIClassification (rus-Cyrl)": 13.29, "RuSciBenchOECDClassification (rus-Cyrl)": 10.62, "TNews (cmn-Hans)": 21.05, "ToxicConversationsClassification": 61.05, "TweetSentimentExtractionClassification": 55.05, "Waimai (cmn-Hans)": 63.31 } ] }, "Clustering": { "v_measure": [ { "Model": "all-mpnet-base-v2", "ArxivClusteringP2P": 48.38, "ArxivClusteringS2S": 39.72, "BiorxivClusteringP2P": 39.62, "BiorxivClusteringS2S": 35.02, "GeoreviewClusteringP2P (rus-Cyrl)": 20.33, "MasakhaNEWSClusteringP2P (amh-Ethi)": 42.49, "MasakhaNEWSClusteringP2P (eng)": 67.24, "MasakhaNEWSClusteringP2P (fra-Latn)": 61.99, "MasakhaNEWSClusteringP2P (hau-Latn)": 37.17, "MasakhaNEWSClusteringP2P (ibo-Latn)": 52.0, "MasakhaNEWSClusteringP2P (lin-Latn)": 69.68, "MasakhaNEWSClusteringP2P (lug-Latn)": 50.96, "MasakhaNEWSClusteringP2P (orm-Ethi)": 28.42, "MasakhaNEWSClusteringP2P (pcm-Latn)": 64.01, "MasakhaNEWSClusteringP2P (run-Latn)": 57.6, "MasakhaNEWSClusteringP2P (sna-Latn)": 54.99, "MasakhaNEWSClusteringP2P (som-Latn)": 31.16, "MasakhaNEWSClusteringP2P (swa-Latn)": 28.29, "MasakhaNEWSClusteringP2P (tir-Ethi)": 41.85, "MasakhaNEWSClusteringP2P (xho-Latn)": 35.24, "MasakhaNEWSClusteringP2P (yor-Latn)": 42.15, "MasakhaNEWSClusteringS2S (amh-Ethi)": 44.48, "MasakhaNEWSClusteringS2S (eng)": 35.69, "MasakhaNEWSClusteringS2S (fra-Latn)": 41.05, "MasakhaNEWSClusteringS2S (hau-Latn)": 16.64, "MasakhaNEWSClusteringS2S (ibo-Latn)": 38.63, "MasakhaNEWSClusteringS2S (lin-Latn)": 70.72, "MasakhaNEWSClusteringS2S (lug-Latn)": 46.97, "MasakhaNEWSClusteringS2S (orm-Ethi)": 23.85, "MasakhaNEWSClusteringS2S (pcm-Latn)": 68.7, "MasakhaNEWSClusteringS2S (run-Latn)": 52.27, "MasakhaNEWSClusteringS2S (sna-Latn)": 47.64, "MasakhaNEWSClusteringS2S (som-Latn)": 30.94, "MasakhaNEWSClusteringS2S (swa-Latn)": 17.12, "MasakhaNEWSClusteringS2S (tir-Ethi)": 42.01, "MasakhaNEWSClusteringS2S (xho-Latn)": 24.16, "MasakhaNEWSClusteringS2S (yor-Latn)": 35.04, "MedrxivClusteringP2P": 35.58, "MedrxivClusteringS2S": 32.87, "RedditClustering": 54.82, "RedditClusteringP2P": 56.77, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 14.66, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 12.49, "StackExchangeClustering": 53.8, "StackExchangeClusteringP2P": 34.28, "TwentyNewsgroupsClustering": 49.74 } ] }, "PairClassification": { "ap": [ { "Model": "all-mpnet-base-v2", "CDSC-E (pol-Latn)": 45.37, "OpusparcusPC (deu-Latn)": 89.78, "OpusparcusPC (en)": 97.75, "OpusparcusPC (fin-Latn)": 85.82, "OpusparcusPC (fra-Latn)": 86.61, "OpusparcusPC (rus-Cyrl)": 79.85, "OpusparcusPC (swe-Latn)": 81.81, "PSC (pol-Latn)": 83.28, "PawsXPairClassification (deu-Latn)": 52.17, "PawsXPairClassification (en)": 61.99, "PawsXPairClassification (spa-Latn)": 55.06, "PawsXPairClassification (fra-Latn)": 56.42, "PawsXPairClassification (jpn-Hira)": 47.43, "PawsXPairClassification (kor-Hang)": 49.75, "PawsXPairClassification (cmn-Hans)": 52.47, "SICK-E-PL (pol-Latn)": 46.51, "SprintDuplicateQuestions": 90.15, "TERRa (rus-Cyrl)": 44.52, "TwitterSemEval2015": 73.85, "TwitterURLCorpus": 85.11 } ] }, "Reranking": { "map": [ { "Model": "all-mpnet-base-v2", "AlloprofReranking (fra-Latn)": 69.63, "AskUbuntuDupQuestions": 65.85, "MMarcoReranking (cmn-Hans)": 4.65, "MindSmallReranking": 30.97, "RuBQReranking (rus-Cyrl)": 30.96, "SciDocsRR": 88.65, "StackOverflowDupQuestions": 51.98, "SyntecReranking (fra-Latn)": 66.12, "T2Reranking (cmn-Hans)": 58.3 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "all-mpnet-base-v2", "AILACasedocs": 
22.51, "AILAStatutes": 21.27, "ARCChallenge": 11.8, "AlloprofRetrieval (fra-Latn)": 34.27, "AlphaNLI": 22.41, "ArguAna": 46.52, "ArguAna-PL (pol-Latn)": 14.72, "BSARDRetrieval (fra-Latn)": 6.98, "BrightRetrieval (robotics)": 8.36, "BrightRetrieval (psychology)": 22.63, "BrightRetrieval (leetcode)": 26.4, "BrightRetrieval (biology)": 15.52, "BrightRetrieval (theoremqa_questions)": 18.49, "BrightRetrieval (economics)": 16.64, "BrightRetrieval (stackoverflow)": 9.48, "BrightRetrieval (pony)": 6.95, "BrightRetrieval (earth_science)": 20.11, "BrightRetrieval (theoremqa_theorems)": 12.38, "BrightRetrieval (sustainable_living)": 15.34, "BrightRetrieval (aops)": 5.32, "CQADupstackRetrieval": 44.96, "ClimateFEVER": 21.97, "CmedqaRetrieval (cmn-Hans)": 2.0, "CovidRetrieval (cmn-Hans)": 3.7, "DBPedia": 32.09, "DuRetrieval (cmn-Hans)": 4.92, "EcomRetrieval (cmn-Hans)": 3.94, "FEVER": 50.86, "FiQA-PL (pol-Latn)": 3.6, "FiQA2018": 49.96, "GerDaLIRSmall (deu-Latn)": 3.78, "HellaSwag": 26.27, "HotpotQA": 39.29, "LEMBNarrativeQARetrieval": 19.34, "LEMBNeedleRetrieval": 16.0, "LEMBPasskeyRetrieval": 24.5, "LEMBQMSumRetrieval": 21.54, "LEMBSummScreenFDRetrieval": 60.43, "LEMBWikimQARetrieval": 44.92, "LeCaRDv2 (zho-Hans)": 18.09, "LegalBenchConsumerContractsQA": 75.25, "LegalBenchCorporateLobbying": 89.04, "LegalQuAD (deu-Latn)": 10.67, "LegalSummarization": 58.55, "MMarcoRetrieval (cmn-Hans)": 7.13, "MSMARCO": 39.75, "MedicalRetrieval (cmn-Hans)": 1.71, "MintakaRetrieval (ara-Arab)": 1.97, "MintakaRetrieval (deu-Latn)": 17.21, "MintakaRetrieval (spa-Latn)": 10.11, "MintakaRetrieval (fra-Latn)": 12.93, "MintakaRetrieval (hin-Deva)": 2.05, "MintakaRetrieval (ita-Latn)": 5.63, "MintakaRetrieval (jpn-Hira)": 6.72, "MintakaRetrieval (por-Latn)": 8.05, "NFCorpus": 33.29, "NFCorpus-PL (pol-Latn)": 8.77, "NQ": 50.45, "PIQA": 29.03, "Quail": 3.41, "QuoraRetrieval": 87.46, "RARbCode": 53.21, "RARbMath": 71.85, "RuBQRetrieval (rus-Cyrl)": 4.75, "SCIDOCS": 23.76, "SCIDOCS-PL (pol-Latn)": 4.02, "SIQA": 2.38, "SciFact": 65.57, "SciFact-PL (pol-Latn)": 13.31, "SpartQA": 0.22, "SyntecRetrieval (fra-Latn)": 57.39, "T2Retrieval (cmn-Hans)": 2.98, "TRECCOVID": 51.33, "TRECCOVID-PL (pol-Latn)": 12.12, "TempReasonL1": 1.77, "TempReasonL2Fact": 11.2, "TempReasonL2Pure": 1.15, "TempReasonL3Fact": 9.42, "TempReasonL3Pure": 5.59, "Touche2020": 19.93, "VideoRetrieval (cmn-Hans)": 8.48, "WinoGrande": 20.8, "XPQARetrieval (ara-Arab_ara-Arab)": 9.42, "XPQARetrieval (eng-Latn_ara-Arab)": 2.39, "XPQARetrieval (ara-Arab_eng-Latn)": 8.98, "XPQARetrieval (deu-Latn_deu-Latn)": 55.82, "XPQARetrieval (eng-Latn_deu-Latn)": 11.74, "XPQARetrieval (deu-Latn_eng-Latn)": 30.44, "XPQARetrieval (spa-Latn_spa-Latn)": 40.01, "XPQARetrieval (eng-Latn_spa-Latn)": 6.12, "XPQARetrieval (spa-Latn_eng-Latn)": 29.44, "XPQARetrieval (fra-Latn_fra-Latn)": 51.94, "XPQARetrieval (eng-Latn_fra-Latn)": 11.48, "XPQARetrieval (fra-Latn_eng-Latn)": 32.52, "XPQARetrieval (hin-Deva_hin-Deva)": 37.48, "XPQARetrieval (eng-Latn_hin-Deva)": 5.11, "XPQARetrieval (hin-Deva_eng-Latn)": 7.37, "XPQARetrieval (ita-Latn_ita-Latn)": 54.2, "XPQARetrieval (eng-Latn_ita-Latn)": 6.08, "XPQARetrieval (ita-Latn_eng-Latn)": 30.32, "XPQARetrieval (jpn-Hira_jpn-Hira)": 37.45, "XPQARetrieval (eng-Latn_jpn-Hira)": 5.79, "XPQARetrieval (jpn-Hira_eng-Latn)": 14.77, "XPQARetrieval (kor-Hang_kor-Hang)": 10.4, "XPQARetrieval (eng-Latn_kor-Hang)": 7.09, "XPQARetrieval (kor-Hang_eng-Latn)": 6.95, "XPQARetrieval (pol-Latn_pol-Latn)": 23.67, "XPQARetrieval (eng-Latn_pol-Latn)": 8.83, "XPQARetrieval 
(pol-Latn_eng-Latn)": 15.94, "XPQARetrieval (por-Latn_por-Latn)": 33.56, "XPQARetrieval (eng-Latn_por-Latn)": 3.76, "XPQARetrieval (por-Latn_eng-Latn)": 23.45, "XPQARetrieval (tam-Taml_tam-Taml)": 5.53, "XPQARetrieval (eng-Latn_tam-Taml)": 3.3, "XPQARetrieval (tam-Taml_eng-Latn)": 4.0, "XPQARetrieval (cmn-Hans_cmn-Hans)": 23.84, "XPQARetrieval (eng-Latn_cmn-Hans)": 7.2, "XPQARetrieval (cmn-Hans_eng-Latn)": 12.84 } ] }, "STS": { "spearman": [ { "Model": "all-mpnet-base-v2", "AFQMC (cmn-Hans)": 8.01, "ATEC (cmn-Hans)": 14.03, "BIOSSES": 80.43, "BQ (cmn-Hans)": 21.39, "CDSC-R (pol-Latn)": 77.04, "LCQMC (cmn-Hans)": 22.84, "PAWSX (cmn-Hans)": 6.44, "RUParaPhraserSTS (rus-Cyrl)": 42.15, "RuSTSBenchmarkSTS (rus-Cyrl)": 55.68, "SICK-R": 80.59, "SICK-R-PL (pol-Latn)": 50.2, "SICKFr (fra-Latn)": 67.05, "STS12": 72.63, "STS13": 83.48, "STS14": 78.0, "STS15": 85.66, "STS16": 80.03, "STS17 (en-en)": 90.6, "STS17 (eng-Latn_ara-Arab)": 6.76, "STS17 (fra-Latn_eng-Latn)": 41.64, "STS17 (eng-Latn_tur-Latn)": -4.58, "STS17 (eng-Latn_deu-Latn)": 35.5, "STS17 (spa-Latn_eng-Latn)": 25.28, "STS17 (ita-Latn_eng-Latn)": 31.8, "STS17 (spa-Latn)": 78.4, "STS17 (kor-Hang)": 39.11, "STS17 (ara-Arab)": 55.42, "STS17 (nld-Latn_eng-Latn)": 32.89, "STS22 (en)": 68.39, "STS22 (spa-Latn_eng-Latn)": 55.09, "STS22 (deu-Latn_pol-Latn)": 23.53, "STS22 (cmn-Hans_eng-Latn)": 40.47, "STS22 (pol-Latn)": 24.21, "STS22 (tur-Latn)": 29.35, "STS22 (spa-Latn_ita-Latn)": 41.61, "STS22 (fra-Latn_pol-Latn)": 73.25, "STS22 (rus-Cyrl)": 15.83, "STS22 (deu-Latn)": 27.0, "STS22 (spa-Latn)": 55.98, "STS22 (pol-Latn_eng-Latn)": 51.07, "STS22 (fra-Latn)": 77.1, "STS22 (deu-Latn_eng-Latn)": 49.73, "STS22 (ara-Arab)": 38.96, "STS22 (deu-Latn_fra-Latn)": 31.39, "STS22 (ita-Latn)": 58.02, "STS22 (cmn-Hans)": 42.24, "STSB (cmn-Hans)": 37.7, "STSBenchmark": 83.42, "STSBenchmarkMultilingualSTS (nld-Latn)": 57.01, "STSBenchmarkMultilingualSTS (rus-Cyrl)": 55.54, "STSBenchmarkMultilingualSTS (fra-Latn)": 65.15, "STSBenchmarkMultilingualSTS (ita-Latn)": 62.72, "STSBenchmarkMultilingualSTS (spa-Latn)": 65.78, "STSBenchmarkMultilingualSTS (en)": 83.42, "STSBenchmarkMultilingualSTS (deu-Latn)": 61.43, "STSBenchmarkMultilingualSTS (por-Latn)": 62.12, "STSBenchmarkMultilingualSTS (cmn-Hans)": 39.43, "STSBenchmarkMultilingualSTS (pol-Latn)": 52.36 } ] }, "Summarization": { "spearman": [ { "Model": "all-mpnet-base-v2", "SummEval": 27.49, "SummEvalFr (fra-Latn)": 28.11 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "all-mpnet-base-v2" } ] } }, "glove.6B.300d": { "BitextMining": { "f1": [ { "Model": "glove.6B.300d", "BUCC (de-en)": 0.18, "BUCC (fr-en)": 0.19, "BUCC (ru-en)": 0.1, "BUCC (zh-en)": 0.0 } ] }, "Classification": { "accuracy": [ { "Model": "glove.6B.300d", "AmazonCounterfactualClassification (en)": 56.91, "AmazonPolarityClassification": 60.32, "AmazonReviewsClassification (en)": 29.67, "Banking77Classification": 67.69, "EmotionClassification": 36.93, "ImdbClassification": 62.57, "MTOPDomainClassification (en)": 79.11, "MTOPIntentClassification (en)": 55.85, "MassiveIntentClassification (en)": 56.19, "MassiveScenarioClassification (en)": 66.03, "ToxicConversationsClassification": 65.4, "TweetSentimentExtractionClassification": 50.8 } ] }, "Clustering": { "v_measure": [ { "Model": "glove.6B.300d", "ArxivClusteringP2P": 32.56, "ArxivClusteringS2S": 23.14, "BiorxivClusteringP2P": 29.27, "BiorxivClusteringS2S": 19.18, "MedrxivClusteringP2P": 26.12, "MedrxivClusteringS2S": 20.38, "RedditClustering": 28.46, "RedditClusteringP2P": 35.82, 
"StackExchangeClustering": 35.8, "StackExchangeClusteringP2P": 28.51, "TwentyNewsgroupsClustering": 25.83 } ] }, "PairClassification": { "ap": [ { "Model": "glove.6B.300d", "SprintDuplicateQuestions": 86.96, "TwitterSemEval2015": 48.45, "TwitterURLCorpus": 77.35 } ] }, "Reranking": { "map": [ { "Model": "glove.6B.300d", "AskUbuntuDupQuestions": 49.57, "MindSmallReranking": 27.01, "SciDocsRR": 62.56, "StackOverflowDupQuestions": 34.03 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "glove.6B.300d", "ArguAna": 36.3, "CQADupstackRetrieval": 15.47, "ClimateFEVER": 14.44, "DBPedia": 18.28, "FEVER": 14.99, "FiQA2018": 10.09, "HotpotQA": 19.18, "MSMARCO": 9.6, "NFCorpus": 13.87, "NQ": 12.87, "QuoraRetrieval": 71.32, "SCIDOCS": 8.04, "SciFact": 29.58, "TRECCOVID": 36.22, "Touche2020": 13.99 } ] }, "STS": { "spearman": [ { "Model": "glove.6B.300d", "BIOSSES": 44.93, "SICK-R": 55.43, "STS12": 54.64, "STS13": 69.16, "STS14": 60.81, "STS15": 72.31, "STS16": 65.34, "STS17 (en-en)": 77.95, "STS22 (en)": 56.35, "STSBenchmark": 61.54 } ] }, "Summarization": { "spearman": [ { "Model": "glove.6B.300d", "SummEval": 28.87 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "glove.6B.300d" } ] } }, "google-gecko.text-embedding-preview-0409": { "BitextMining": { "f1": [ { "Model": "google-gecko.text-embedding-preview-0409" } ] }, "Classification": { "accuracy": [ { "Model": "google-gecko.text-embedding-preview-0409", "AmazonCounterfactualClassification (en)": 75.34, "AmazonPolarityClassification": 97.34, "AmazonReviewsClassification (en)": 51.17, "Banking77Classification": 88.62, "EmotionClassification": 52.51, "ImdbClassification": 95.65, "MTOPDomainClassification (en)": 98.35, "MTOPIntentClassification (en)": 83.43, "MassiveIntentClassification (en)": 80.22, "MassiveScenarioClassification (en)": 87.19, "ToxicConversationsClassification": 89.67, "TweetSentimentExtractionClassification": 74.52 } ] }, "Clustering": { "v_measure": [ { "Model": "google-gecko.text-embedding-preview-0409", "ArxivClusteringP2P": 46.27, "ArxivClusteringS2S": 38.36, "BiorxivClusteringP2P": 37.87, "BiorxivClusteringS2S": 35.67, "MedrxivClusteringP2P": 33.11, "MedrxivClusteringS2S": 31.54, "RedditClustering": 65.81, "RedditClusteringP2P": 66.62, "StackExchangeClustering": 74.52, "StackExchangeClusteringP2P": 37.63, "TwentyNewsgroupsClustering": 54.87 } ] }, "PairClassification": { "ap": [ { "Model": "google-gecko.text-embedding-preview-0409", "SprintDuplicateQuestions": 96.26, "TwitterSemEval2015": 79.04, "TwitterURLCorpus": 87.53 } ] }, "Reranking": { "map": [ { "Model": "google-gecko.text-embedding-preview-0409", "AskUbuntuDupQuestions": 64.4, "MindSmallReranking": 33.07, "SciDocsRR": 83.59, "StackOverflowDupQuestions": 54.56 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "google-gecko.text-embedding-preview-0409", "ArguAna": 62.18, "BrightRetrieval (earth_science)": 34.38, "BrightRetrieval (leetcode)": 29.64, "BrightRetrieval (theoremqa_questions)": 21.51, "BrightRetrieval (aops)": 9.33, "BrightRetrieval (sustainable_living)": 17.25, "BrightRetrieval (pony)": 3.59, "BrightRetrieval (theoremqa_theorems)": 16.77, "BrightRetrieval (stackoverflow)": 17.93, "BrightRetrieval (biology)": 22.98, "BrightRetrieval (robotics)": 15.98, "BrightRetrieval (economics)": 19.5, "BrightRetrieval (psychology)": 27.86, "CQADupstackRetrieval": 48.89, "ClimateFEVER": 33.21, "DBPedia": 47.12, "FEVER": 86.96, "FiQA2018": 59.24, "HotpotQA": 71.33, "MSMARCO": 32.58, "NFCorpus": 40.33, "NQ": 61.28, "QuoraRetrieval": 88.18, "SCIDOCS": 20.34, "SciFact": 
75.42, "TRECCOVID": 82.62, "Touche2020": 25.86 } ] }, "STS": { "spearman": [ { "Model": "google-gecko.text-embedding-preview-0409", "BIOSSES": 89.46, "SICK-R": 81.93, "STS12": 77.59, "STS13": 90.36, "STS14": 85.25, "STS15": 89.66, "STS16": 87.34, "STS17 (en-en)": 92.06, "STS22 (en)": 68.02, "STSBenchmark": 88.99 } ] }, "Summarization": { "spearman": [ { "Model": "google-gecko.text-embedding-preview-0409", "SummEval": 32.63 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "google-gecko.text-embedding-preview-0409", "Core17InstructionRetrieval": 5.44, "News21InstructionRetrieval": 3.94, "Robust04InstructionRetrieval": -2.4 } ] } }, "flaubert_base_cased": { "BitextMining": { "f1": [ { "Model": "flaubert_base_cased" } ] }, "Classification": { "accuracy": [ { "Model": "flaubert_base_cased", "AmazonReviewsClassification (fr)": 24.9, "MTOPDomainClassification (fr)": 25.55, "MTOPIntentClassification (fr)": 9.49, "MasakhaNEWSClassification (fra)": 71.14, "MassiveIntentClassification (fr)": 6.98, "MassiveScenarioClassification (fr)": 11.41 } ] }, "Clustering": { "v_measure": [ { "Model": "flaubert_base_cased", "AlloProfClusteringP2P": 52.86, "AlloProfClusteringS2S": 14.46, "HALClusteringS2S": 3.85, "MLSUMClusteringP2P": 39.06, "MLSUMClusteringS2S": 17.13, "MasakhaNEWSClusteringP2P (fra)": 41.61, "MasakhaNEWSClusteringS2S (fra)": 21.26 } ] }, "PairClassification": { "ap": [ { "Model": "flaubert_base_cased", "OpusparcusPC (fr)": 82.15, "PawsXPairClassification (fr)": 51.89 } ] }, "Reranking": { "map": [ { "Model": "flaubert_base_cased", "AlloprofReranking": 34.81, "SyntecReranking": 55.88 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "flaubert_base_cased", "AlloprofRetrieval": 1.63, "BSARDRetrieval": 0.0, "MintakaRetrieval (fr)": 0.58, "SyntecRetrieval": 20.56, "XPQARetrieval (fr)": 6.59 } ] }, "STS": { "spearman": [ { "Model": "flaubert_base_cased", "SICKFr": 53.86, "STS22 (fr)": 65.37, "STSBenchmarkMultilingualSTS (fr)": 37.14 } ] }, "Summarization": { "spearman": [ { "Model": "flaubert_base_cased", "SummEvalFr": 31.26 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "flaubert_base_cased" } ] } }, "LLM2Vec-Mistral-supervised": { "BitextMining": { "f1": [ { "Model": "LLM2Vec-Mistral-supervised" } ] }, "Classification": { "accuracy": [ { "Model": "LLM2Vec-Mistral-supervised", "AmazonCounterfactualClassification (en)": 77.58, "AmazonPolarityClassification": 91.12, "AmazonReviewsClassification (en)": 49.97, "Banking77Classification": 88.31, "EmotionClassification": 52.04, "ImdbClassification": 87.42, "MTOPDomainClassification (en)": 96.04, "MTOPIntentClassification (en)": 84.77, "MassiveIntentClassification (en)": 79.29, "MassiveScenarioClassification (en)": 81.64, "ToxicConversationsClassification": 69.26, "TweetSentimentExtractionClassification": 62.14 } ] }, "Clustering": { "v_measure": [ { "Model": "LLM2Vec-Mistral-supervised", "ArxivClusteringP2P": 42.81, "ArxivClusteringS2S": 44.24, "BiorxivClusteringP2P": 34.27, "BiorxivClusteringS2S": 35.53, "MedrxivClusteringP2P": 31.07, "MedrxivClusteringS2S": 31.27, "RedditClustering": 60.24, "RedditClusteringP2P": 64.12, "StackExchangeClustering": 70.73, "StackExchangeClusteringP2P": 34.5, "TwentyNewsgroupsClustering": 52.18 } ] }, "PairClassification": { "ap": [ { "Model": "LLM2Vec-Mistral-supervised", "SprintDuplicateQuestions": 96.82, "TwitterSemEval2015": 80.6, "TwitterURLCorpus": 86.56 } ] }, "Reranking": { "map": [ { "Model": "LLM2Vec-Mistral-supervised", "AskUbuntuDupQuestions": 63.98, "MindSmallReranking": 31.5, "SciDocsRR": 83.8, 
"StackOverflowDupQuestions": 54.41 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "LLM2Vec-Mistral-supervised", "ArguAna": 57.48, "CQADupstackRetrieval": 48.84, "ClimateFEVER": 35.19, "DBPedia": 49.58, "FEVER": 89.4, "FiQA2018": 53.11, "HotpotQA": 74.07, "MSMARCO": 42.17, "NFCorpus": 39.33, "NQ": 61.7, "QuoraRetrieval": 87.75, "SCIDOCS": 22.5, "SciFact": 78.86, "TRECCOVID": 77.69, "Touche2020": 22.18 } ] }, "STS": { "spearman": [ { "Model": "LLM2Vec-Mistral-supervised", "BIOSSES": 85.24, "SICK-R": 83.7, "STS12": 78.8, "STS13": 86.37, "STS14": 84.04, "STS15": 88.99, "STS16": 87.22, "STS17 (en-en)": 90.19, "STS22 (en)": 67.68, "STSBenchmark": 88.65 } ] }, "Summarization": { "spearman": [ { "Model": "LLM2Vec-Mistral-supervised", "SummEval": 29.96 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "LLM2Vec-Mistral-supervised" } ] } }, "bge-large-en-v1.5": { "BitextMining": { "f1": [ { "Model": "bge-large-en-v1.5" } ] }, "Classification": { "accuracy": [ { "Model": "bge-large-en-v1.5" } ] }, "Clustering": { "v_measure": [ { "Model": "bge-large-en-v1.5" } ] }, "PairClassification": { "ap": [ { "Model": "bge-large-en-v1.5" } ] }, "Reranking": { "map": [ { "Model": "bge-large-en-v1.5" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "bge-large-en-v1.5", "AILACasedocs": 25.15, "AILAStatutes": 20.74, "ARCChallenge": 9.99, "AlphaNLI": 13.13, "BrightRetrieval (stackoverflow)": 9.51, "BrightRetrieval (earth_science)": 24.15, "BrightRetrieval (aops)": 6.08, "BrightRetrieval (sustainable_living)": 13.27, "BrightRetrieval (psychology)": 17.44, "BrightRetrieval (robotics)": 12.21, "BrightRetrieval (theoremqa_theorems)": 5.51, "BrightRetrieval (pony)": 5.64, "BrightRetrieval (biology)": 11.96, "BrightRetrieval (theoremqa_questions)": 12.56, "BrightRetrieval (leetcode)": 26.68, "BrightRetrieval (economics)": 16.59, "GerDaLIRSmall": 3.96, "HellaSwag": 28.5, "LeCaRDv2": 22.68, "LegalBenchConsumerContractsQA": 73.52, "LegalBenchCorporateLobbying": 91.51, "LegalQuAD": 16.22, "LegalSummarization": 59.99, "PIQA": 27.99, "Quail": 1.83, "RARbCode": 48.12, "RARbMath": 57.36, "SIQA": 1.04, "SpartQA": 2.99, "TempReasonL1": 1.46, "TempReasonL2Fact": 24.25, "TempReasonL2Pure": 2.35, "TempReasonL3Fact": 20.64, "TempReasonL3Pure": 6.67, "WinoGrande": 19.18 } ] }, "STS": { "spearman": [ { "Model": "bge-large-en-v1.5" } ] }, "Summarization": { "spearman": [ { "Model": "bge-large-en-v1.5" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "bge-large-en-v1.5" } ] } }, "LLM2Vec-Llama-2-unsupervised": { "BitextMining": { "f1": [ { "Model": "LLM2Vec-Llama-2-unsupervised" } ] }, "Classification": { "accuracy": [ { "Model": "LLM2Vec-Llama-2-unsupervised", "AmazonCounterfactualClassification (en)": 76.91, "AmazonPolarityClassification": 79.05, "AmazonReviewsClassification (en)": 40.08, "Banking77Classification": 84.65, "EmotionClassification": 46.58, "ImdbClassification": 75.68, "MTOPDomainClassification (en)": 94.33, "MTOPIntentClassification (en)": 79.54, "MassiveIntentClassification (en)": 73.84, "MassiveScenarioClassification (en)": 79.17, "ToxicConversationsClassification": 71.81, "TweetSentimentExtractionClassification": 57.17 } ] }, "Clustering": { "v_measure": [ { "Model": "LLM2Vec-Llama-2-unsupervised", "ArxivClusteringP2P": 47.81, "ArxivClusteringS2S": 40.53, "BiorxivClusteringP2P": 38.12, "BiorxivClusteringS2S": 31.25, "MedrxivClusteringP2P": 30.94, "MedrxivClusteringS2S": 28.04, "RedditClustering": 42.84, "RedditClusteringP2P": 60.1, "StackExchangeClustering": 65.12, "StackExchangeClusteringP2P": 33.61, 
"TwentyNewsgroupsClustering": 30.76 } ] }, "PairClassification": { "ap": [ { "Model": "LLM2Vec-Llama-2-unsupervised", "SprintDuplicateQuestions": 87.57, "TwitterSemEval2015": 65.14, "TwitterURLCorpus": 80.94 } ] }, "Reranking": { "map": [ { "Model": "LLM2Vec-Llama-2-unsupervised", "AskUbuntuDupQuestions": 55.56, "MindSmallReranking": 30.86, "SciDocsRR": 77.62, "StackOverflowDupQuestions": 47.77 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "LLM2Vec-Llama-2-unsupervised", "ArguAna": 47.09, "CQADupstackRetrieval": 30.78, "ClimateFEVER": 20.67, "DBPedia": 25.81, "FEVER": 43.48, "FiQA2018": 24.62, "HotpotQA": 48.46, "MSMARCO": 18.81, "NFCorpus": 26.81, "NQ": 33.21, "QuoraRetrieval": 86.15, "SCIDOCS": 10.0, "SciFact": 64.48, "TRECCOVID": 60.67, "Touche2020": 10.18 } ] }, "STS": { "spearman": [ { "Model": "LLM2Vec-Llama-2-unsupervised", "BIOSSES": 82.41, "SICK-R": 71.77, "STS12": 65.39, "STS13": 79.26, "STS14": 72.98, "STS15": 82.72, "STS16": 81.02, "STS17 (en-en)": 86.7, "STS22 (en)": 63.47, "STSBenchmark": 78.32 } ] }, "Summarization": { "spearman": [ { "Model": "LLM2Vec-Llama-2-unsupervised", "SummEval": 31.38 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "LLM2Vec-Llama-2-unsupervised" } ] } }, "GritLM-7B-noinstruct": { "BitextMining": { "f1": [ { "Model": "GritLM-7B-noinstruct" } ] }, "Classification": { "accuracy": [ { "Model": "GritLM-7B-noinstruct" } ] }, "Clustering": { "v_measure": [ { "Model": "GritLM-7B-noinstruct" } ] }, "PairClassification": { "ap": [ { "Model": "GritLM-7B-noinstruct" } ] }, "Reranking": { "map": [ { "Model": "GritLM-7B-noinstruct" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "GritLM-7B-noinstruct", "ARCChallenge": 16.57, "AlphaNLI": 29.56, "HellaSwag": 36.03, "PIQA": 35.8, "Quail": 8.68, "RARbCode": 83.14, "RARbMath": 83.01, "SIQA": 5.73, "SpartQA": 1.56, "TempReasonL1": 2.57, "TempReasonL2Fact": 48.25, "TempReasonL2Pure": 8.98, "TempReasonL3Fact": 34.11, "TempReasonL3Pure": 12.44, "WinoGrande": 52.12 } ] }, "STS": { "spearman": [ { "Model": "GritLM-7B-noinstruct" } ] }, "Summarization": { "spearman": [ { "Model": "GritLM-7B-noinstruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "GritLM-7B-noinstruct" } ] } }, "voyage-law-2": { "BitextMining": { "f1": [ { "Model": "voyage-law-2" } ] }, "Classification": { "accuracy": [ { "Model": "voyage-law-2", "AmazonReviewsClassification (fr)": 41.98, "MTOPDomainClassification (fr)": 90.12, "MTOPIntentClassification (fr)": 62.44, "MasakhaNEWSClassification (fra)": 76.42, "MassiveIntentClassification (fr)": 66.94, "MassiveScenarioClassification (fr)": 72.78 } ] }, "Clustering": { "v_measure": [ { "Model": "voyage-law-2", "AlloProfClusteringP2P": 62.5, "AlloProfClusteringS2S": 44.28, "HALClusteringS2S": 26.36, "MLSUMClusteringP2P (fr)": 44.03, "MLSUMClusteringS2S (fr)": 42.95, "MasakhaNEWSClusteringP2P (fra)": 50.68, "MasakhaNEWSClusteringS2S (fra)": 38.79 } ] }, "PairClassification": { "ap": [ { "Model": "voyage-law-2", "OpusparcusPC (fr)": 93.06, "PawsXPairClassification (fr)": 61.54 } ] }, "Reranking": { "map": [ { "Model": "voyage-law-2", "AlloprofReranking": 72.92, "SyntecReranking": 91.2 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "voyage-law-2", "AILACasedocs": 44.56, "AILAStatutes": 45.51, "AlloprofRetrieval": 57.28, "BSARDRetrieval": 11.83, "GerDaLIRSmall": 44.91, "LEMBNarrativeQARetrieval": 55.78, "LEMBNeedleRetrieval": 80.5, "LEMBPasskeyRetrieval": 93.75, "LEMBQMSumRetrieval": 57.26, "LEMBSummScreenFDRetrieval": 98.72, "LEMBWikimQARetrieval": 87.08, "LeCaRDv2": 72.75, 
"LegalBenchConsumerContractsQA": 83.27, "LegalBenchCorporateLobbying": 95.66, "LegalQuAD": 67.47, "LegalSummarization": 68.96, "MintakaRetrieval (fr)": 34.92, "SyntecRetrieval": 87.33, "XPQARetrieval (fr)": 73.56 } ] }, "STS": { "spearman": [ { "Model": "voyage-law-2", "SICKFr": 74.09, "STS22 (fr)": 83.75, "STSBenchmarkMultilingualSTS (fr)": 83.02 } ] }, "Summarization": { "spearman": [ { "Model": "voyage-law-2", "SummEvalFr": 30.34 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "voyage-law-2" } ] } }, "Cohere-embed-english-v3.0": { "BitextMining": { "f1": [ { "Model": "Cohere-embed-english-v3.0" } ] }, "Classification": { "accuracy": [ { "Model": "Cohere-embed-english-v3.0" } ] }, "Clustering": { "v_measure": [ { "Model": "Cohere-embed-english-v3.0" } ] }, "PairClassification": { "ap": [ { "Model": "Cohere-embed-english-v3.0" } ] }, "Reranking": { "map": [ { "Model": "Cohere-embed-english-v3.0" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "Cohere-embed-english-v3.0", "AILACasedocs": 31.54, "AILAStatutes": 27.15, "ARCChallenge": 9.89, "AlphaNLI": 15.1, "BrightRetrieval (psychology)": 21.82, "BrightRetrieval (economics)": 20.18, "BrightRetrieval (robotics)": 16.21, "BrightRetrieval (biology)": 18.98, "BrightRetrieval (stackoverflow)": 16.47, "BrightRetrieval (theoremqa_theorems)": 6.04, "BrightRetrieval (pony)": 1.77, "BrightRetrieval (sustainable_living)": 17.69, "BrightRetrieval (aops)": 6.46, "BrightRetrieval (theoremqa_questions)": 15.07, "BrightRetrieval (leetcode)": 26.78, "BrightRetrieval (earth_science)": 27.45, "GerDaLIRSmall": 6.05, "HellaSwag": 26.35, "LeCaRDv2": 21.02, "LegalBenchConsumerContractsQA": 77.12, "LegalBenchCorporateLobbying": 93.68, "LegalQuAD": 26.08, "LegalSummarization": 61.7, "PIQA": 28.49, "Quail": 4.1, "RARbCode": 57.19, "RARbMath": 72.26, "SIQA": 4.26, "SpartQA": 3.75, "TempReasonL1": 1.5, "TempReasonL2Fact": 35.91, "TempReasonL2Pure": 1.89, "TempReasonL3Fact": 27.51, "TempReasonL3Pure": 8.53, "WinoGrande": 58.01 } ] }, "STS": { "spearman": [ { "Model": "Cohere-embed-english-v3.0" } ] }, "Summarization": { "spearman": [ { "Model": "Cohere-embed-english-v3.0" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "Cohere-embed-english-v3.0", "Core17InstructionRetrieval": 2.8, "News21InstructionRetrieval": 0.2, "Robust04InstructionRetrieval": -3.63 } ] } }, "Baichuan-text-embedding": { "BitextMining": { "f1": [ { "Model": "Baichuan-text-embedding" } ] }, "Classification": { "accuracy": [ { "Model": "Baichuan-text-embedding", "AmazonReviewsClassification (zh)": 48.3, "IFlyTek": 50.75, "JDReview": 87.69, "MassiveIntentClassification (zh-CN)": 74.91, "MassiveScenarioClassification (zh-CN)": 81.28, "MultilingualSentiment": 76.83, "OnlineShopping": 94.42, "TNews": 52.62, "Waimai": 88.77 } ] }, "Clustering": { "v_measure": [ { "Model": "Baichuan-text-embedding", "CLSClusteringP2P": 60.37, "CLSClusteringS2S": 51.09, "ThuNewsClusteringP2P": 58.23, "ThuNewsClusteringS2S": 57.83 } ] }, "PairClassification": { "ap": [ { "Model": "Baichuan-text-embedding", "Cmnli": 85.31, "Ocnli": 79.33 } ] }, "Reranking": { "map": [ { "Model": "Baichuan-text-embedding", "CMedQAv1": 88.06, "CMedQAv2": 88.46, "MMarcoReranking": 34.3, "T2Reranking": 67.85 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "Baichuan-text-embedding", "CmedqaRetrieval": 47.64, "CovidRetrieval": 86.86, "DuRetrieval": 88.43, "EcomRetrieval": 66.39, "MMarcoRetrieval": 80.17, "MedicalRetrieval": 61.1, "T2Retrieval": 80.11, "VideoRetrieval": 74.28 } ] }, "STS": { "spearman": [ { "Model": 
"Baichuan-text-embedding", "AFQMC": 50.8, "ATEC": 53.23, "BQ": 66.49, "LCQMC": 76.6, "PAWSX": 47.56, "QBQTC": 39.96, "STS22 (zh)": 65.78, "STSB": 80.14 } ] }, "Summarization": { "spearman": [ { "Model": "Baichuan-text-embedding" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "Baichuan-text-embedding" } ] } }, "flaubert_large_cased": { "BitextMining": { "f1": [ { "Model": "flaubert_large_cased" } ] }, "Classification": { "accuracy": [ { "Model": "flaubert_large_cased", "AmazonReviewsClassification (fr)": 22.45, "MTOPDomainClassification (fr)": 24.27, "MTOPIntentClassification (fr)": 9.79, "MasakhaNEWSClassification (fra)": 55.64, "MassiveIntentClassification (fr)": 16.41, "MassiveScenarioClassification (fr)": 22.72 } ] }, "Clustering": { "v_measure": [ { "Model": "flaubert_large_cased", "AlloProfClusteringP2P": 40.85, "AlloProfClusteringS2S": 21.76, "HALClusteringS2S": 5.26, "MLSUMClusteringP2P": 38.09, "MLSUMClusteringS2S": 18.71, "MasakhaNEWSClusteringP2P (fra)": 26.43, "MasakhaNEWSClusteringS2S (fra)": 24.68 } ] }, "PairClassification": { "ap": [ { "Model": "flaubert_large_cased", "OpusparcusPC (fr)": 74.78, "PawsXPairClassification (fr)": 54.14 } ] }, "Reranking": { "map": [ { "Model": "flaubert_large_cased", "AlloprofReranking": 26.29, "SyntecReranking": 42.8 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "flaubert_large_cased", "AlloprofRetrieval": 0.58, "BSARDRetrieval": 0.0, "MintakaRetrieval (fr)": 0.26, "SyntecRetrieval": 1.58, "XPQARetrieval (fr)": 3.69 } ] }, "STS": { "spearman": [ { "Model": "flaubert_large_cased", "SICKFr": 34.6, "STS22 (fr)": 48.52, "STSBenchmarkMultilingualSTS (fr)": 15.66 } ] }, "Summarization": { "spearman": [ { "Model": "flaubert_large_cased", "SummEvalFr": 29.25 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "flaubert_large_cased" } ] } }, "dragon-plus": { "BitextMining": { "f1": [ { "Model": "dragon-plus" } ] }, "Classification": { "accuracy": [ { "Model": "dragon-plus" } ] }, "Clustering": { "v_measure": [ { "Model": "dragon-plus" } ] }, "PairClassification": { "ap": [ { "Model": "dragon-plus" } ] }, "Reranking": { "map": [ { "Model": "dragon-plus" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "dragon-plus", "ARCChallenge": 8.91, "AlphaNLI": 32.1, "HellaSwag": 27.69, "PIQA": 28.01, "Quail": 4.09, "RARbCode": 17.58, "RARbMath": 45.09, "SIQA": 2.0, "SpartQA": 10.34, "TempReasonL1": 1.82, "TempReasonL2Fact": 17.45, "TempReasonL2Pure": 0.55, "TempReasonL3Fact": 15.71, "TempReasonL3Pure": 7.97, "WinoGrande": 67.18 } ] }, "STS": { "spearman": [ { "Model": "dragon-plus" } ] }, "Summarization": { "spearman": [ { "Model": "dragon-plus" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "dragon-plus" } ] } }, "text2vec-base-multilingual": { "BitextMining": { "f1": [ { "Model": "text2vec-base-multilingual" } ] }, "Classification": { "accuracy": [ { "Model": "text2vec-base-multilingual", "AmazonReviewsClassification (fr)": 34.25, "MTOPDomainClassification (fr)": 71.83, "MTOPIntentClassification (fr)": 44.53, "MasakhaNEWSClassification (fra)": 73.84, "MassiveIntentClassification (fr)": 51.93, "MassiveScenarioClassification (fr)": 58.31 } ] }, "Clustering": { "v_measure": [ { "Model": "text2vec-base-multilingual", "AlloProfClusteringP2P": 49.11, "AlloProfClusteringS2S": 32.72, "HALClusteringS2S": 16.19, "MLSUMClusteringP2P": 36.19, "MLSUMClusteringS2S": 30.39, "MasakhaNEWSClusteringP2P (fra)": 38.51, "MasakhaNEWSClusteringS2S (fra)": 32.51 } ] }, "PairClassification": { "ap": [ { "Model": "text2vec-base-multilingual", "OpusparcusPC (fr)": 
92.04, "PawsXPairClassification (fr)": 65.57 } ] }, "Reranking": { "map": [ { "Model": "text2vec-base-multilingual", "AlloprofReranking": 51.48, "SyntecReranking": 70.28 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "text2vec-base-multilingual", "AlloprofRetrieval": 18.9, "BSARDRetrieval": 0.0, "MintakaRetrieval (fr)": 14.81, "SyntecRetrieval": 49.69, "XPQARetrieval (fr)": 40.4 } ] }, "STS": { "spearman": [ { "Model": "text2vec-base-multilingual", "SICKFr": 77.25, "STS22 (fr)": 74.1, "STSBenchmarkMultilingualSTS (fr)": 83.48 } ] }, "Summarization": { "spearman": [ { "Model": "text2vec-base-multilingual", "SummEvalFr": 29.33 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "text2vec-base-multilingual" } ] } }, "mistral-embed": { "BitextMining": { "f1": [ { "Model": "mistral-embed" } ] }, "Classification": { "accuracy": [ { "Model": "mistral-embed", "AmazonReviewsClassification (fr)": 41.59, "MTOPDomainClassification (fr)": 90.05, "MTOPIntentClassification (fr)": 66.09, "MasakhaNEWSClassification (fra)": 81.4, "MassiveIntentClassification (fr)": 62.83, "MassiveScenarioClassification (fr)": 69.71 } ] }, "Clustering": { "v_measure": [ { "Model": "mistral-embed", "AlloProfClusteringP2P": 62.01, "AlloProfClusteringS2S": 49.2, "HALClusteringS2S": 26.17, "MLSUMClusteringP2P": 45.28, "MLSUMClusteringS2S": 42.74, "MasakhaNEWSClusteringP2P (fra)": 48.13, "MasakhaNEWSClusteringS2S (fra)": 39.62 } ] }, "PairClassification": { "ap": [ { "Model": "mistral-embed", "OpusparcusPC (fr)": 92.61, "PawsXPairClassification (fr)": 62.02 } ] }, "Reranking": { "map": [ { "Model": "mistral-embed", "AlloprofReranking": 72.36, "SyntecReranking": 88.57 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "mistral-embed", "AILACasedocs": 38.2, "AILAStatutes": 44.81, "AlloprofRetrieval": 56.84, "BSARDRetrieval": 2.48, "GerDaLIRSmall": 17.85, "LeCaRDv2": 61.12, "LegalBenchConsumerContractsQA": 80.8, "LegalBenchCorporateLobbying": 94.11, "LegalQuAD": 47.17, "LegalSummarization": 67.39, "MintakaRetrieval (fr)": 21.73, "SyntecRetrieval": 78.77, "XPQARetrieval (fr)": 74.24 } ] }, "STS": { "spearman": [ { "Model": "mistral-embed", "SICKFr": 76.21, "STS22 (fr)": 82.74, "STSBenchmarkMultilingualSTS (fr)": 79.72 } ] }, "Summarization": { "spearman": [ { "Model": "mistral-embed", "SummEvalFr": 31.47 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "mistral-embed" } ] } }, "text-similarity-davinci-001": { "BitextMining": { "f1": [ { "Model": "text-similarity-davinci-001" } ] }, "Classification": { "accuracy": [ { "Model": "text-similarity-davinci-001" } ] }, "Clustering": { "v_measure": [ { "Model": "text-similarity-davinci-001", "RedditClustering": 31.78, "StackExchangeClustering": 36.86, "TwentyNewsgroupsClustering": 29.33 } ] }, "PairClassification": { "ap": [ { "Model": "text-similarity-davinci-001", "SprintDuplicateQuestions": 69.52, "TwitterSemEval2015": 74.42, "TwitterURLCorpus": 83.75 } ] }, "Reranking": { "map": [ { "Model": "text-similarity-davinci-001", "AskUbuntuDupQuestions": 53.56, "SciDocsRR": 68.7, "StackOverflowDupQuestions": 39.41 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "text-similarity-davinci-001" } ] }, "STS": { "spearman": [ { "Model": "text-similarity-davinci-001", "BIOSSES": 68.95, "SICK-R": 78.72, "STSBenchmark": 84.08 } ] }, "Summarization": { "spearman": [ { "Model": "text-similarity-davinci-001" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "text-similarity-davinci-001" } ] } }, "monot5-base-msmarco-10k": { "BitextMining": { "f1": [ { "Model": "monot5-base-msmarco-10k" } ] }, 
"Classification": { "accuracy": [ { "Model": "monot5-base-msmarco-10k" } ] }, "Clustering": { "v_measure": [ { "Model": "monot5-base-msmarco-10k" } ] }, "PairClassification": { "ap": [ { "Model": "monot5-base-msmarco-10k" } ] }, "Reranking": { "map": [ { "Model": "monot5-base-msmarco-10k" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "monot5-base-msmarco-10k" } ] }, "STS": { "spearman": [ { "Model": "monot5-base-msmarco-10k" } ] }, "Summarization": { "spearman": [ { "Model": "monot5-base-msmarco-10k" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "monot5-base-msmarco-10k", "Core17InstructionRetrieval": -4.06, "News21InstructionRetrieval": 5.02, "Robust04InstructionRetrieval": -6.2 } ] } }, "nomic-embed-text-v1.5-512": { "BitextMining": { "f1": [ { "Model": "nomic-embed-text-v1.5-512" } ] }, "Classification": { "accuracy": [ { "Model": "nomic-embed-text-v1.5-512", "AmazonCounterfactualClassification (en)": 74.27, "AmazonPolarityClassification": 91.89, "AmazonReviewsClassification (en)": 46.97, "Banking77Classification": 84.15, "EmotionClassification": 47.73, "ImdbClassification": 85.47, "MTOPDomainClassification (en)": 92.62, "MTOPIntentClassification (en)": 74.27, "MassiveIntentClassification (en)": 73.07, "MassiveScenarioClassification (en)": 76.82, "ToxicConversationsClassification": 71.25, "TweetSentimentExtractionClassification": 60.4 } ] }, "Clustering": { "v_measure": [ { "Model": "nomic-embed-text-v1.5-512", "ArxivClusteringP2P": 45.45, "ArxivClusteringS2S": 36.19, "BiorxivClusteringP2P": 38.41, "BiorxivClusteringS2S": 32.28, "MedrxivClusteringP2P": 34.47, "MedrxivClusteringS2S": 31.43, "RedditClustering": 55.9, "RedditClusteringP2P": 60.58, "StackExchangeClustering": 62.94, "StackExchangeClusteringP2P": 33.81, "TwentyNewsgroupsClustering": 49.36 } ] }, "PairClassification": { "ap": [ { "Model": "nomic-embed-text-v1.5-512", "SprintDuplicateQuestions": 92.91, "TwitterSemEval2015": 74.3, "TwitterURLCorpus": 86.57 } ] }, "Reranking": { "map": [ { "Model": "nomic-embed-text-v1.5-512", "AskUbuntuDupQuestions": 61.6, "MindSmallReranking": 30.34, "SciDocsRR": 80.33, "StackOverflowDupQuestions": 50.32 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "nomic-embed-text-v1.5-512", "ArguAna": 47.45, "CQADupstackRetrieval": 39.06, "ClimateFEVER": 40.7, "DBPedia": 42.96, "FEVER": 85.7, "FiQA2018": 36.92, "HotpotQA": 71.48, "MSMARCO": 42.29, "NFCorpus": 33.31, "NQ": 58.83, "QuoraRetrieval": 87.87, "SCIDOCS": 17.88, "SciFact": 70.12, "TRECCOVID": 82.12, "Touche2020": 29.24 } ] }, "STS": { "spearman": [ { "Model": "nomic-embed-text-v1.5-512", "BIOSSES": 83.3, "SICK-R": 79.27, "STS12": 78.3, "STS13": 85.81, "STS14": 81.38, "STS15": 86.79, "STS16": 84.56, "STS17 (en-en)": 87.25, "STS22 (en)": 65.24, "STSBenchmark": 85.14 } ] }, "Summarization": { "spearman": [ { "Model": "nomic-embed-text-v1.5-512", "SummEval": 30.47 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "nomic-embed-text-v1.5-512" } ] } }, "bge-base-zh-v1.5": { "BitextMining": { "f1": [ { "Model": "bge-base-zh-v1.5" } ] }, "Classification": { "accuracy": [ { "Model": "bge-base-zh-v1.5", "AmazonReviewsClassification (zh)": 40.15, "IFlyTek": 48.62, "JDReview": 83.62, "MassiveIntentClassification (zh-CN)": 67.93, "MassiveScenarioClassification (zh-CN)": 73.98, "MultilingualSentiment": 70.67, "OnlineShopping": 91.26, "TNews": 51.08, "Waimai": 85.36 } ] }, "Clustering": { "v_measure": [ { "Model": "bge-base-zh-v1.5", "CLSClusteringP2P": 39.91, "CLSClusteringS2S": 37.63, "ThuNewsClusteringP2P": 58.45, "ThuNewsClusteringS2S": 
54.12 } ] }, "PairClassification": { "ap": [ { "Model": "bge-base-zh-v1.5", "Cmnli": 84.1, "Ocnli": 75.41 } ] }, "Reranking": { "map": [ { "Model": "bge-base-zh-v1.5", "CMedQAv1": 80.47, "CMedQAv2": 84.88, "MMarcoReranking": 29.74, "T2Reranking": 66.49 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "bge-base-zh-v1.5", "CmedqaRetrieval": 41.61, "CovidRetrieval": 74.7, "DuRetrieval": 85.07, "EcomRetrieval": 64.25, "MMarcoRetrieval": 77.69, "MedicalRetrieval": 56.51, "T2Retrieval": 83.71, "VideoRetrieval": 72.35 } ] }, "STS": { "spearman": [ { "Model": "bge-base-zh-v1.5", "AFQMC": 42.4, "ATEC": 48.17, "BQ": 61.78, "LCQMC": 74.45, "PAWSX": 20.4, "QBQTC": 36.22, "STS22 (zh)": 68.01, "STSB": 78.31 } ] }, "Summarization": { "spearman": [ { "Model": "bge-base-zh-v1.5" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "bge-base-zh-v1.5" } ] } }, "komninos": { "BitextMining": { "f1": [ { "Model": "komninos", "BUCC (de-en)": 0.18, "BUCC (fr-en)": 0.08, "BUCC (ru-en)": 0.15, "BUCC (zh-en)": 0.05, "Tatoeba (afr-eng)": 4.82, "Tatoeba (amh-eng)": 1.18, "Tatoeba (ang-eng)": 8.54, "Tatoeba (ara-eng)": 0.63, "Tatoeba (arq-eng)": 0.4, "Tatoeba (arz-eng)": 0.63, "Tatoeba (ast-eng)": 11.69, "Tatoeba (awa-eng)": 0.0, "Tatoeba (aze-eng)": 3.22, "Tatoeba (bel-eng)": 1.75, "Tatoeba (ben-eng)": 0.2, "Tatoeba (ber-eng)": 7.0, "Tatoeba (bos-eng)": 9.31, "Tatoeba (bre-eng)": 4.17, "Tatoeba (bul-eng)": 1.29, "Tatoeba (cat-eng)": 7.73, "Tatoeba (cbk-eng)": 5.61, "Tatoeba (ceb-eng)": 4.88, "Tatoeba (ces-eng)": 3.55, "Tatoeba (cha-eng)": 19.29, "Tatoeba (cmn-eng)": 0.5, "Tatoeba (cor-eng)": 4.15, "Tatoeba (csb-eng)": 5.69, "Tatoeba (cym-eng)": 8.4, "Tatoeba (dan-eng)": 6.99, "Tatoeba (deu-eng)": 3.67, "Tatoeba (dsb-eng)": 5.33, "Tatoeba (dtp-eng)": 4.25, "Tatoeba (ell-eng)": 0.63, "Tatoeba (epo-eng)": 2.45, "Tatoeba (est-eng)": 2.69, "Tatoeba (eus-eng)": 4.69, "Tatoeba (fao-eng)": 7.61, "Tatoeba (fin-eng)": 3.36, "Tatoeba (fra-eng)": 7.0, "Tatoeba (fry-eng)": 12.36, "Tatoeba (gla-eng)": 3.07, "Tatoeba (gle-eng)": 4.81, "Tatoeba (glg-eng)": 8.12, "Tatoeba (gsw-eng)": 18.87, "Tatoeba (heb-eng)": 0.68, "Tatoeba (hin-eng)": 0.1, "Tatoeba (hrv-eng)": 5.41, "Tatoeba (hsb-eng)": 6.32, "Tatoeba (hun-eng)": 3.42, "Tatoeba (hye-eng)": 0.97, "Tatoeba (ido-eng)": 7.1, "Tatoeba (ile-eng)": 13.61, "Tatoeba (ina-eng)": 8.57, "Tatoeba (ind-eng)": 7.26, "Tatoeba (isl-eng)": 4.09, "Tatoeba (ita-eng)": 5.54, "Tatoeba (jav-eng)": 11.43, "Tatoeba (jpn-eng)": 0.2, "Tatoeba (kab-eng)": 2.71, "Tatoeba (kat-eng)": 1.11, "Tatoeba (kaz-eng)": 1.17, "Tatoeba (khm-eng)": 0.55, "Tatoeba (kor-eng)": 0.5, "Tatoeba (kur-eng)": 8.55, "Tatoeba (kzj-eng)": 4.61, "Tatoeba (lat-eng)": 4.07, "Tatoeba (lfn-eng)": 2.83, "Tatoeba (lit-eng)": 0.95, "Tatoeba (lvs-eng)": 3.25, "Tatoeba (mal-eng)": 0.29, "Tatoeba (mar-eng)": 0.2, "Tatoeba (max-eng)": 14.53, "Tatoeba (mhr-eng)": 0.2, "Tatoeba (mkd-eng)": 0.2, "Tatoeba (mon-eng)": 1.1, "Tatoeba (nds-eng)": 10.37, "Tatoeba (nld-eng)": 9.5, "Tatoeba (nno-eng)": 4.49, "Tatoeba (nob-eng)": 4.95, "Tatoeba (nov-eng)": 14.53, "Tatoeba (oci-eng)": 5.8, "Tatoeba (orv-eng)": 0.24, "Tatoeba (pam-eng)": 6.65, "Tatoeba (pes-eng)": 0.5, "Tatoeba (pms-eng)": 8.05, "Tatoeba (pol-eng)": 5.13, "Tatoeba (por-eng)": 5.87, "Tatoeba (ron-eng)": 6.76, "Tatoeba (rus-eng)": 0.2, "Tatoeba (slk-eng)": 4.23, "Tatoeba (slv-eng)": 6.05, "Tatoeba (spa-eng)": 5.03, "Tatoeba (sqi-eng)": 4.36, "Tatoeba (srp-eng)": 1.77, "Tatoeba (swe-eng)": 6.72, "Tatoeba (swg-eng)": 8.54, "Tatoeba (swh-eng)": 11.49, "Tatoeba (tam-eng)": 1.3, "Tatoeba 
(tat-eng)": 0.77, "Tatoeba (tel-eng)": 0.85, "Tatoeba (tgl-eng)": 2.61, "Tatoeba (tha-eng)": 0.69, "Tatoeba (tuk-eng)": 5.76, "Tatoeba (tur-eng)": 5.24, "Tatoeba (tzl-eng)": 15.51, "Tatoeba (uig-eng)": 0.6, "Tatoeba (ukr-eng)": 1.23, "Tatoeba (urd-eng)": 0.4, "Tatoeba (uzb-eng)": 4.73, "Tatoeba (vie-eng)": 6.55, "Tatoeba (war-eng)": 4.12, "Tatoeba (wuu-eng)": 0.2, "Tatoeba (xho-eng)": 4.33, "Tatoeba (yid-eng)": 0.59, "Tatoeba (yue-eng)": 0.5, "Tatoeba (zsm-eng)": 7.27 } ] }, "Classification": { "accuracy": [ { "Model": "komninos", "AmazonCounterfactualClassification (en)": 60.54, "AmazonPolarityClassification": 59.59, "AmazonReviewsClassification (en)": 31.01, "Banking77Classification": 67.05, "EmotionClassification": 33.18, "ImdbClassification": 63.98, "MTOPDomainClassification (en)": 78.57, "MTOPIntentClassification (en)": 57.07, "MassiveIntentClassification (en)": 57.21, "MassiveScenarioClassification (en)": 66.11, "ToxicConversationsClassification": 67.76, "TweetSentimentExtractionClassification": 49.68 } ] }, "Clustering": { "v_measure": [ { "Model": "komninos", "ArxivClusteringP2P": 34.73, "ArxivClusteringS2S": 26.01, "BiorxivClusteringP2P": 29.76, "BiorxivClusteringS2S": 20.71, "BlurbsClusteringP2P": 11.37, "BlurbsClusteringS2S": 8.01, "MedrxivClusteringP2P": 26.65, "MedrxivClusteringS2S": 21.5, "RedditClustering": 28.84, "RedditClusteringP2P": 7.37, "StackExchangeClustering": 39.04, "StackExchangeClusteringP2P": 30.23, "TenKGnadClusteringP2P": 15.89, "TenKGnadClusteringS2S": 4.84, "TwentyNewsgroupsClustering": 27.42 } ] }, "PairClassification": { "ap": [ { "Model": "komninos", "SprintDuplicateQuestions": 85.55, "TwitterSemEval2015": 53.85, "TwitterURLCorpus": 79.41 } ] }, "Reranking": { "map": [ { "Model": "komninos", "AskUbuntuDupQuestions": 50.88, "MindSmallReranking": 28.92, "SciDocsRR": 63.55, "StackOverflowDupQuestions": 35.65 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "komninos", "ArguAna": 30.96, "CQADupstackRetrieval": 16.79, "ClimateFEVER": 14.87, "DBPedia": 15.88, "FEVER": 15.56, "FiQA2018": 10.49, "HotpotQA": 20.77, "MSMARCO": 9.75, "NFCorpus": 11.79, "NQ": 12.75, "QuoraRetrieval": 71.57, "SCIDOCS": 8.47, "SciFact": 29.53, "TRECCOVID": 35.92, "Touche2020": 13.17 } ] }, "STS": { "spearman": [ { "Model": "komninos", "BIOSSES": 50.25, "SICK-R": 55.49, "STS12": 53.51, "STS13": 70.8, "STS14": 63.56, "STS15": 74.08, "STS16": 64.6, "STS17 (ar-ar)": 13.78, "STS17 (en-ar)": 9.08, "STS17 (en-de)": -3.11, "STS17 (en-en)": 76.91, "STS17 (en-tr)": -0.45, "STS17 (es-en)": -8.18, "STS17 (es-es)": 48.23, "STS17 (fr-en)": 5.81, "STS17 (it-en)": 3.64, "STS17 (ko-ko)": 2.54, "STS17 (nl-en)": 0.44, "STS22 (ar)": 32.42, "STS22 (de)": 33.04, "STS22 (de-en)": 28.65, "STS22 (de-fr)": 14.77, "STS22 (de-pl)": 11.21, "STS22 (en)": 53.89, "STS22 (es)": 48.53, "STS22 (es-en)": 26.97, "STS22 (es-it)": 41.1, "STS22 (fr)": 49.43, "STS22 (fr-pl)": 39.44, "STS22 (it)": 57.77, "STS22 (pl)": 12.47, "STS22 (pl-en)": 45.55, "STS22 (ru)": 19.44, "STS22 (tr)": 47.38, "STS22 (zh)": 4.78, "STS22 (zh-en)": 14.05, "STSBenchmark": 61.55 } ] }, "Summarization": { "spearman": [ { "Model": "komninos", "SummEval": 30.49 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "komninos" } ] } }, "text-search-ada-001": { "BitextMining": { "f1": [ { "Model": "text-search-ada-001" } ] }, "Classification": { "accuracy": [ { "Model": "text-search-ada-001" } ] }, "Clustering": { "v_measure": [ { "Model": "text-search-ada-001", "BiorxivClusteringS2S": 26.05, "MedrxivClusteringS2S": 25.67, "TwentyNewsgroupsClustering": 
44.92 } ] }, "PairClassification": { "ap": [ { "Model": "text-search-ada-001" } ] }, "Reranking": { "map": [ { "Model": "text-search-ada-001" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "text-search-ada-001", "ArguAna": 46.91, "ClimateFEVER": 18.5, "DBPedia": 36.2, "FEVER": 72.1, "FiQA2018": 38.41, "HotpotQA": 59.39, "MSMARCO": 37.94, "NFCorpus": 33.17, "NQ": 42.81, "QuoraRetrieval": 70.57, "SCIDOCS": 14.83, "SciFact": 67.25, "TRECCOVID": 72.43, "Touche2020": 28.68 } ] }, "STS": { "spearman": [ { "Model": "text-search-ada-001" } ] }, "Summarization": { "spearman": [ { "Model": "text-search-ada-001" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "text-search-ada-001" } ] } }, "text-embedding-3-small-instruct": { "BitextMining": { "f1": [ { "Model": "text-embedding-3-small-instruct" } ] }, "Classification": { "accuracy": [ { "Model": "text-embedding-3-small-instruct" } ] }, "Clustering": { "v_measure": [ { "Model": "text-embedding-3-small-instruct" } ] }, "PairClassification": { "ap": [ { "Model": "text-embedding-3-small-instruct" } ] }, "Reranking": { "map": [ { "Model": "text-embedding-3-small-instruct" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "text-embedding-3-small-instruct", "ARCChallenge": 13.76, "AlphaNLI": 21.14, "HellaSwag": 27.2, "PIQA": 29.59, "Quail": 6.64, "RARbCode": 72.14, "RARbMath": 64.31, "SIQA": 2.98, "SpartQA": 3.58, "TempReasonL1": 2.29, "TempReasonL2Fact": 26.34, "TempReasonL2Pure": 3.17, "TempReasonL3Fact": 22.72, "TempReasonL3Pure": 9.98, "WinoGrande": 25.49 } ] }, "STS": { "spearman": [ { "Model": "text-embedding-3-small-instruct" } ] }, "Summarization": { "spearman": [ { "Model": "text-embedding-3-small-instruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "text-embedding-3-small-instruct" } ] } }, "gte-Qwen1.5-7B-instruct": { "BitextMining": { "f1": [ { "Model": "gte-Qwen1.5-7B-instruct" } ] }, "Classification": { "accuracy": [ { "Model": "gte-Qwen1.5-7B-instruct", "AmazonCounterfactualClassification (en)": 83.16, "AmazonPolarityClassification": 96.7, "AmazonReviewsClassification (en)": 62.17, "AmazonReviewsClassification (zh)": 52.95, "Banking77Classification": 81.68, "EmotionClassification": 54.53, "IFlyTek": 53.77, "ImdbClassification": 95.58, "JDReview": 88.2, "MTOPDomainClassification (en)": 95.75, "MTOPIntentClassification (en)": 84.26, "MassiveIntentClassification (zh-CN)": 76.25, "MassiveIntentClassification (en)": 78.47, "MassiveScenarioClassification (en)": 78.19, "MassiveScenarioClassification (zh-CN)": 77.26, "MultilingualSentiment": 77.42, "OnlineShopping": 94.48, "TNews": 51.24, "ToxicConversationsClassification": 78.75, "TweetSentimentExtractionClassification": 66.0, "Waimai": 88.63 } ] }, "Clustering": { "v_measure": [ { "Model": "gte-Qwen1.5-7B-instruct", "ArxivClusteringP2P": 56.4, "ArxivClusteringS2S": 51.45, "BiorxivClusteringP2P": 49.01, "BiorxivClusteringS2S": 45.06, "CLSClusteringP2P": 47.21, "CLSClusteringS2S": 45.79, "MedrxivClusteringP2P": 44.37, "MedrxivClusteringS2S": 42.0, "RedditClustering": 73.37, "RedditClusteringP2P": 72.51, "StackExchangeClustering": 79.07, "StackExchangeClusteringP2P": 49.57, "ThuNewsClusteringP2P": 87.43, "ThuNewsClusteringS2S": 87.9, "TwentyNewsgroupsClustering": 51.31 } ] }, "PairClassification": { "ap": [ { "Model": "gte-Qwen1.5-7B-instruct", "Cmnli": 91.81, "Ocnli": 85.22, "SprintDuplicateQuestions": 95.99, "TwitterSemEval2015": 79.36, "TwitterURLCorpus": 86.79 } ] }, "Reranking": { "map": [ { "Model": "gte-Qwen1.5-7B-instruct", "AskUbuntuDupQuestions": 66.0, "CMedQAv1": 86.37, 
"CMedQAv2": 87.41, "MindSmallReranking": 32.71, "SciDocsRR": 87.89, "StackOverflowDupQuestions": 53.93, "T2Reranking": 68.11 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "gte-Qwen1.5-7B-instruct", "ArguAna": 62.65, "BrightRetrieval (stackoverflow)": 19.85, "BrightRetrieval (earth_science)": 36.22, "BrightRetrieval (leetcode)": 25.46, "BrightRetrieval (theoremqa_questions)": 26.97, "BrightRetrieval (economics)": 17.72, "BrightRetrieval (robotics)": 13.47, "BrightRetrieval (pony)": 9.79, "BrightRetrieval (aops)": 14.36, "BrightRetrieval (psychology)": 24.61, "BrightRetrieval (theoremqa_theorems)": 26.66, "BrightRetrieval (biology)": 30.92, "BrightRetrieval (sustainable_living)": 14.93, "CQADupstackRetrieval": 40.64, "ClimateFEVER": 44.0, "CmedqaRetrieval": 43.47, "CovidRetrieval": 80.87, "DBPedia": 48.04, "DuRetrieval": 86.01, "EcomRetrieval": 66.46, "FEVER": 93.35, "FiQA2018": 55.31, "HotpotQA": 72.25, "MMarcoRetrieval": 73.83, "MSMARCO": 41.68, "MedicalRetrieval": 61.33, "NFCorpus": 38.25, "NQ": 61.79, "QuoraRetrieval": 89.61, "SCIDOCS": 27.69, "SciFact": 75.31, "T2Retrieval": 83.58, "TRECCOVID": 72.72, "Touche2020": 20.3, "VideoRetrieval": 69.41 } ] }, "STS": { "spearman": [ { "Model": "gte-Qwen1.5-7B-instruct", "AFQMC": 58.47, "ATEC": 55.46, "BIOSSES": 81.12, "BQ": 77.59, "LCQMC": 76.29, "PAWSX": 50.22, "QBQTC": 31.82, "SICK-R": 79.15, "STS12": 76.52, "STS13": 88.63, "STS14": 83.32, "STS15": 87.5, "STS16": 86.39, "STS17 (en-en)": 87.79, "STS22 (en)": 66.4, "STS22 (zh)": 67.36, "STSB": 81.37, "STSBenchmark": 87.35 } ] }, "Summarization": { "spearman": [ { "Model": "gte-Qwen1.5-7B-instruct", "SummEval": 31.46 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "gte-Qwen1.5-7B-instruct" } ] } }, "bge-large-en-v1.5-instruct": { "BitextMining": { "f1": [ { "Model": "bge-large-en-v1.5-instruct" } ] }, "Classification": { "accuracy": [ { "Model": "bge-large-en-v1.5-instruct" } ] }, "Clustering": { "v_measure": [ { "Model": "bge-large-en-v1.5-instruct" } ] }, "PairClassification": { "ap": [ { "Model": "bge-large-en-v1.5-instruct" } ] }, "Reranking": { "map": [ { "Model": "bge-large-en-v1.5-instruct" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "bge-large-en-v1.5-instruct", "ARCChallenge": 8.86, "AlphaNLI": 0.86, "HellaSwag": 26.24, "PIQA": 23.26, "Quail": 2.72, "RARbCode": 45.25, "RARbMath": 49.82, "SIQA": 0.59, "SpartQA": 2.34, "TempReasonL1": 1.17, "TempReasonL2Fact": 21.19, "TempReasonL2Pure": 2.1, "TempReasonL3Fact": 17.59, "TempReasonL3Pure": 5.99, "WinoGrande": 10.31 } ] }, "STS": { "spearman": [ { "Model": "bge-large-en-v1.5-instruct" } ] }, "Summarization": { "spearman": [ { "Model": "bge-large-en-v1.5-instruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "bge-large-en-v1.5-instruct" } ] } }, "text-embedding-3-large-instruct": { "BitextMining": { "f1": [ { "Model": "text-embedding-3-large-instruct" } ] }, "Classification": { "accuracy": [ { "Model": "text-embedding-3-large-instruct" } ] }, "Clustering": { "v_measure": [ { "Model": "text-embedding-3-large-instruct" } ] }, "PairClassification": { "ap": [ { "Model": "text-embedding-3-large-instruct" } ] }, "Reranking": { "map": [ { "Model": "text-embedding-3-large-instruct" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "text-embedding-3-large-instruct", "ARCChallenge": 21.22, "AlphaNLI": 34.23, "HellaSwag": 31.4, "PIQA": 37.52, "Quail": 13.6, "RARbCode": 89.41, "RARbMath": 87.73, "SIQA": 4.99, "SpartQA": 7.45, "TempReasonL1": 2.07, "TempReasonL2Fact": 39.77, "TempReasonL2Pure": 11.04, "TempReasonL3Fact": 37.04, 
"TempReasonL3Pure": 15.51, "WinoGrande": 33.92 } ] }, "STS": { "spearman": [ { "Model": "text-embedding-3-large-instruct" } ] }, "Summarization": { "spearman": [ { "Model": "text-embedding-3-large-instruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "text-embedding-3-large-instruct" } ] } }, "e5-mistral-7b-instruct-noinstruct": { "BitextMining": { "f1": [ { "Model": "e5-mistral-7b-instruct-noinstruct" } ] }, "Classification": { "accuracy": [ { "Model": "e5-mistral-7b-instruct-noinstruct" } ] }, "Clustering": { "v_measure": [ { "Model": "e5-mistral-7b-instruct-noinstruct" } ] }, "PairClassification": { "ap": [ { "Model": "e5-mistral-7b-instruct-noinstruct" } ] }, "Reranking": { "map": [ { "Model": "e5-mistral-7b-instruct-noinstruct" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "e5-mistral-7b-instruct-noinstruct", "ARCChallenge": 20.48, "AlphaNLI": 18.88, "HellaSwag": 32.25, "PIQA": 32.8, "Quail": 6.25, "RARbCode": 79.84, "RARbMath": 76.19, "SIQA": 5.08, "SpartQA": 10.87, "TempReasonL1": 3.04, "TempReasonL2Fact": 35.63, "TempReasonL2Pure": 9.32, "TempReasonL3Fact": 30.41, "TempReasonL3Pure": 14.39, "WinoGrande": 45.18 } ] }, "STS": { "spearman": [ { "Model": "e5-mistral-7b-instruct-noinstruct" } ] }, "Summarization": { "spearman": [ { "Model": "e5-mistral-7b-instruct-noinstruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "e5-mistral-7b-instruct-noinstruct" } ] } }, "bge-large-zh-noinstruct": { "BitextMining": { "f1": [ { "Model": "bge-large-zh-noinstruct" } ] }, "Classification": { "accuracy": [ { "Model": "bge-large-zh-noinstruct", "AmazonReviewsClassification (zh)": 41.94, "IFlyTek": 45.32, "JDReview": 85.38, "MassiveIntentClassification (zh-CN)": 66.96, "MassiveScenarioClassification (zh-CN)": 73.39, "MultilingualSentiment": 73.7, "OnlineShopping": 91.66, "TNews": 52.05, "Waimai": 86.83 } ] }, "Clustering": { "v_measure": [ { "Model": "bge-large-zh-noinstruct", "CLSClusteringP2P": 41.23, "CLSClusteringS2S": 40.04, "ThuNewsClusteringP2P": 62.03, "ThuNewsClusteringS2S": 56.75 } ] }, "PairClassification": { "ap": [ { "Model": "bge-large-zh-noinstruct", "Cmnli": 82.17, "Ocnli": 71.37 } ] }, "Reranking": { "map": [ { "Model": "bge-large-zh-noinstruct", "CMedQAv1": 81.72, "CMedQAv2": 84.64, "MMarcoReranking": 27.1, "T2Reranking": 66.16 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "bge-large-zh-noinstruct", "CmedqaRetrieval": 41.03, "CovidRetrieval": 75.07, "DuRetrieval": 84.68, "EcomRetrieval": 65.6, "MMarcoRetrieval": 81.38, "MedicalRetrieval": 58.28, "T2Retrieval": 84.39, "VideoRetrieval": 73.93 } ] }, "STS": { "spearman": [ { "Model": "bge-large-zh-noinstruct", "AFQMC": 43.06, "ATEC": 48.29, "BQ": 60.53, "LCQMC": 74.71, "PAWSX": 16.64, "QBQTC": 35.2, "STS22 (zh)": 67.19, "STSB": 78.41 } ] }, "Summarization": { "spearman": [ { "Model": "bge-large-zh-noinstruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "bge-large-zh-noinstruct" } ] } }, "e5-large-v2": { "BitextMining": { "f1": [ { "Model": "e5-large-v2" } ] }, "Classification": { "accuracy": [ { "Model": "e5-large-v2" } ] }, "Clustering": { "v_measure": [ { "Model": "e5-large-v2", "BiorxivClusteringP2P": 36.72, "BiorxivClusteringS2S": 35.47, "MedrxivClusteringP2P": 31.45, "MedrxivClusteringS2S": 29.91, "RedditClustering": 55.5, "RedditClusteringP2P": 63.71, "StackExchangeClustering": 65.23, "StackExchangeClusteringP2P": 33.62, "TwentyNewsgroupsClustering": 48.73 } ] }, "PairClassification": { "ap": [ { "Model": "e5-large-v2" } ] }, "Reranking": { "map": [ { "Model": "e5-large-v2" } ] }, 
"Retrieval": { "ndcg_at_10": [ { "Model": "e5-large-v2" } ] }, "STS": { "spearman": [ { "Model": "e5-large-v2" } ] }, "Summarization": { "spearman": [ { "Model": "e5-large-v2" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "e5-large-v2", "Core17InstructionRetrieval": 0.12, "News21InstructionRetrieval": 0.87, "Robust04InstructionRetrieval": -4.16 } ] } }, "voyage-multilingual-2": { "BitextMining": { "f1": [ { "Model": "voyage-multilingual-2" } ] }, "Classification": { "accuracy": [ { "Model": "voyage-multilingual-2", "AmazonReviewsClassification (fr)": 43.36, "MTOPDomainClassification (fr)": 90.33, "MTOPIntentClassification (fr)": 60.52, "MasakhaNEWSClassification (fra)": 74.81, "MassiveIntentClassification (fr)": 68.06, "MassiveScenarioClassification (fr)": 74.29 } ] }, "Clustering": { "v_measure": [ { "Model": "voyage-multilingual-2", "AlloProfClusteringP2P": 65.37, "AlloProfClusteringS2S": 47.03, "HALClusteringS2S": 27.67, "MLSUMClusteringP2P (fr)": 45.99, "MLSUMClusteringS2S (fr)": 45.57, "MasakhaNEWSClusteringP2P (fra)": 44.53, "MasakhaNEWSClusteringS2S (fra)": 49.8 } ] }, "PairClassification": { "ap": [ { "Model": "voyage-multilingual-2", "OpusparcusPC (fr)": 93.68, "PawsXPairClassification (fr)": 63.64 } ] }, "Reranking": { "map": [ { "Model": "voyage-multilingual-2", "AlloprofReranking": 74.78, "SyntecReranking": 90.4 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "voyage-multilingual-2", "AlloprofRetrieval": 58.27, "BSARDRetrieval": 5.14, "LEMBNarrativeQARetrieval": 64.69, "LEMBNeedleRetrieval": 75.25, "LEMBPasskeyRetrieval": 97.0, "LEMBQMSumRetrieval": 51.49, "LEMBSummScreenFDRetrieval": 99.11, "LEMBWikimQARetrieval": 87.49, "MintakaRetrieval (fr)": 49.19, "SyntecRetrieval": 87.28, "XPQARetrieval (fr)": 72.92 } ] }, "STS": { "spearman": [ { "Model": "voyage-multilingual-2", "SICKFr": 74.9, "STS22 (fr)": 82.76, "STSBenchmarkMultilingualSTS (fr)": 82.72 } ] }, "Summarization": { "spearman": [ { "Model": "voyage-multilingual-2", "SummEvalFr": 29.96 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "voyage-multilingual-2" } ] } }, "all-MiniLM-L6-v2-instruct": { "BitextMining": { "f1": [ { "Model": "all-MiniLM-L6-v2-instruct" } ] }, "Classification": { "accuracy": [ { "Model": "all-MiniLM-L6-v2-instruct" } ] }, "Clustering": { "v_measure": [ { "Model": "all-MiniLM-L6-v2-instruct" } ] }, "PairClassification": { "ap": [ { "Model": "all-MiniLM-L6-v2-instruct" } ] }, "Reranking": { "map": [ { "Model": "all-MiniLM-L6-v2-instruct" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "all-MiniLM-L6-v2-instruct", "ARCChallenge": 9.4, "AlphaNLI": 15.09, "HellaSwag": 20.51, "PIQA": 24.68, "Quail": 3.46, "RARbCode": 42.47, "RARbMath": 62.39, "SIQA": 1.53, "SpartQA": 0.57, "TempReasonL1": 1.05, "TempReasonL2Fact": 16.57, "TempReasonL2Pure": 0.49, "TempReasonL3Fact": 14.01, "TempReasonL3Pure": 6.27, "WinoGrande": 20.73 } ] }, "STS": { "spearman": [ { "Model": "all-MiniLM-L6-v2-instruct" } ] }, "Summarization": { "spearman": [ { "Model": "all-MiniLM-L6-v2-instruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "all-MiniLM-L6-v2-instruct" } ] } }, "m3e-base": { "BitextMining": { "f1": [ { "Model": "m3e-base" } ] }, "Classification": { "accuracy": [ { "Model": "m3e-base", "AmazonReviewsClassification (zh)": 43.02, "IFlyTek": 44.42, "JDReview": 85.33, "MassiveIntentClassification (zh-CN)": 68.4, "MassiveScenarioClassification (zh-CN)": 74.6, "MultilingualSentiment": 71.9, "OnlineShopping": 87.77, "TNews": 48.28, "Waimai": 83.99 } ] }, "Clustering": { "v_measure": [ { "Model": 
"m3e-base", "CLSClusteringP2P": 39.81, "CLSClusteringS2S": 37.34, "ThuNewsClusteringP2P": 59.77, "ThuNewsClusteringS2S": 53.78 } ] }, "PairClassification": { "ap": [ { "Model": "m3e-base", "Cmnli": 69.98, "Ocnli": 58.0 } ] }, "Reranking": { "map": [ { "Model": "m3e-base", "CMedQAv1": 77.05, "CMedQAv2": 76.76, "MMarcoReranking": 17.51, "T2Reranking": 66.03 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "m3e-base", "CmedqaRetrieval": 30.33, "CovidRetrieval": 66.42, "DuRetrieval": 75.76, "EcomRetrieval": 50.27, "MMarcoRetrieval": 65.46, "MedicalRetrieval": 42.79, "T2Retrieval": 73.14, "VideoRetrieval": 51.11 } ] }, "STS": { "spearman": [ { "Model": "m3e-base", "AFQMC": 35.87, "ATEC": 41.27, "BQ": 63.81, "LCQMC": 74.88, "PAWSX": 12.19, "QBQTC": 32.07, "STS22 (zh)": 66.73, "STSB": 76.97 } ] }, "Summarization": { "spearman": [ { "Model": "m3e-base" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "m3e-base" } ] } }, "text-search-davinci-001": { "BitextMining": { "f1": [ { "Model": "text-search-davinci-001" } ] }, "Classification": { "accuracy": [ { "Model": "text-search-davinci-001" } ] }, "Clustering": { "v_measure": [ { "Model": "text-search-davinci-001" } ] }, "PairClassification": { "ap": [ { "Model": "text-search-davinci-001" } ] }, "Reranking": { "map": [ { "Model": "text-search-davinci-001" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "text-search-davinci-001", "ArguAna": 43.5, "ClimateFEVER": 22.3, "FEVER": 77.5, "FiQA2018": 51.2, "HotpotQA": 68.8, "NFCorpus": 40.7, "QuoraRetrieval": 63.8, "SciFact": 75.4, "TRECCOVID": 64.9, "Touche2020": 29.1 } ] }, "STS": { "spearman": [ { "Model": "text-search-davinci-001" } ] }, "Summarization": { "spearman": [ { "Model": "text-search-davinci-001" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "text-search-davinci-001" } ] } }, "nomic-embed-text-v1.5-256": { "BitextMining": { "f1": [ { "Model": "nomic-embed-text-v1.5-256" } ] }, "Classification": { "accuracy": [ { "Model": "nomic-embed-text-v1.5-256", "AmazonCounterfactualClassification (en)": 72.94, "AmazonPolarityClassification": 91.35, "AmazonReviewsClassification (en)": 45.73, "Banking77Classification": 83.69, "EmotionClassification": 45.88, "ImdbClassification": 83.99, "MTOPDomainClassification (en)": 91.68, "MTOPIntentClassification (en)": 72.47, "MassiveIntentClassification (en)": 71.76, "MassiveScenarioClassification (en)": 75.67, "ToxicConversationsClassification": 70.87, "TweetSentimentExtractionClassification": 59.2 } ] }, "Clustering": { "v_measure": [ { "Model": "nomic-embed-text-v1.5-256", "ArxivClusteringP2P": 44.82, "ArxivClusteringS2S": 35.32, "BiorxivClusteringP2P": 38.19, "BiorxivClusteringS2S": 31.83, "MedrxivClusteringP2P": 34.08, "MedrxivClusteringS2S": 30.98, "RedditClustering": 54.92, "RedditClusteringP2P": 60.23, "StackExchangeClustering": 61.81, "StackExchangeClusteringP2P": 34.03, "TwentyNewsgroupsClustering": 48.56 } ] }, "PairClassification": { "ap": [ { "Model": "nomic-embed-text-v1.5-256", "SprintDuplicateQuestions": 92.31, "TwitterSemEval2015": 73.61, "TwitterURLCorpus": 86.34 } ] }, "Reranking": { "map": [ { "Model": "nomic-embed-text-v1.5-256", "AskUbuntuDupQuestions": 61.34, "MindSmallReranking": 30.04, "SciDocsRR": 79.4, "StackOverflowDupQuestions": 49.95 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "nomic-embed-text-v1.5-256", "ArguAna": 45.44, "CQADupstackRetrieval": 37.61, "ClimateFEVER": 39.63, "DBPedia": 39.42, "FEVER": 84.4, "FiQA2018": 35.0, "HotpotQA": 67.78, "MSMARCO": 41.38, "NFCorpus": 32.54, "NQ": 57.1, "QuoraRetrieval": 87.65, 
"SCIDOCS": 16.76, "SciFact": 68.24, "TRECCOVID": 80.65, "Touche2020": 28.49 } ] }, "STS": { "spearman": [ { "Model": "nomic-embed-text-v1.5-256", "BIOSSES": 81.58, "SICK-R": 79.24, "STS12": 78.16, "STS13": 86.01, "STS14": 81.25, "STS15": 86.51, "STS16": 84.24, "STS17 (en-en)": 86.44, "STS22 (en)": 65.14, "STSBenchmark": 84.8 } ] }, "Summarization": { "spearman": [ { "Model": "nomic-embed-text-v1.5-256", "SummEval": 30.05 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "nomic-embed-text-v1.5-256" } ] } }, "sbert_large_nlu_ru": { "BitextMining": { "f1": [ { "Model": "sbert_large_nlu_ru" } ] }, "Classification": { "accuracy": [ { "Model": "sbert_large_nlu_ru", "GeoreviewClassification (rus-Cyrl)": 39.97, "HeadlineClassification (rus-Cyrl)": 79.26, "InappropriatenessClassification (rus-Cyrl)": 62.52, "KinopoiskClassification (rus-Cyrl)": 49.51, "MassiveIntentClassification (rus-Cyrl)": 61.09, "MassiveScenarioClassification (rus-Cyrl)": 67.6, "RuReviewsClassification (rus-Cyrl)": 58.27, "RuSciBenchGRNTIClassification (rus-Cyrl)": 53.9, "RuSciBenchOECDClassification (rus-Cyrl)": 43.04 } ] }, "Clustering": { "v_measure": [ { "Model": "sbert_large_nlu_ru", "GeoreviewClusteringP2P (rus-Cyrl)": 59.02, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 50.4, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 46.41 } ] }, "PairClassification": { "ap": [ { "Model": "sbert_large_nlu_ru", "TERRa (rus-Cyrl)": 50.17 } ] }, "Reranking": { "map": [ { "Model": "sbert_large_nlu_ru", "RuBQReranking (rus-Cyrl)": 46.81 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "sbert_large_nlu_ru", "RiaNewsRetrieval (rus-Cyrl)": 11.11, "RuBQRetrieval (rus-Cyrl)": 12.45 } ] }, "STS": { "spearman": [ { "Model": "sbert_large_nlu_ru", "RUParaPhraserSTS (rus-Cyrl)": 62.06, "RuSTSBenchmarkSTS (rus-Cyrl)": 58.82, "STS22 (rus-Cyrl)": 50.75 } ] }, "Summarization": { "spearman": [ { "Model": "sbert_large_nlu_ru" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "sbert_large_nlu_ru" } ] } }, "rubert-tiny": { "BitextMining": { "f1": [ { "Model": "rubert-tiny" } ] }, "Classification": { "accuracy": [ { "Model": "rubert-tiny", "GeoreviewClassification (rus-Cyrl)": 33.45, "HeadlineClassification (rus-Cyrl)": 57.65, "InappropriatenessClassification (rus-Cyrl)": 54.5, "KinopoiskClassification (rus-Cyrl)": 41.36, "MassiveIntentClassification (rus-Cyrl)": 50.1, "MassiveScenarioClassification (rus-Cyrl)": 52.15, "RuReviewsClassification (rus-Cyrl)": 49.56, "RuSciBenchGRNTIClassification (rus-Cyrl)": 35.71, "RuSciBenchOECDClassification (rus-Cyrl)": 26.51 } ] }, "Clustering": { "v_measure": [ { "Model": "rubert-tiny", "GeoreviewClusteringP2P (rus-Cyrl)": 34.4, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 29.89, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 27.98 } ] }, "PairClassification": { "ap": [ { "Model": "rubert-tiny", "TERRa (rus-Cyrl)": 51.06 } ] }, "Reranking": { "map": [ { "Model": "rubert-tiny", "RuBQReranking (rus-Cyrl)": 35.44 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "rubert-tiny", "RiaNewsRetrieval (rus-Cyrl)": 0.79, "RuBQRetrieval (rus-Cyrl)": 3.24 } ] }, "STS": { "spearman": [ { "Model": "rubert-tiny", "RUParaPhraserSTS (rus-Cyrl)": 53.41, "RuSTSBenchmarkSTS (rus-Cyrl)": 58.16, "STS22 (rus-Cyrl)": 47.88 } ] }, "Summarization": { "spearman": [ { "Model": "rubert-tiny" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "rubert-tiny" } ] } }, "google-gecko-256.text-embedding-preview-0409": { "BitextMining": { "f1": [ { "Model": "google-gecko-256.text-embedding-preview-0409" } ] }, "Classification": { "accuracy": [ { "Model": 
"google-gecko-256.text-embedding-preview-0409", "AmazonCounterfactualClassification (en)": 70.93, "AmazonPolarityClassification": 97.34, "AmazonReviewsClassification (en)": 48.47, "Banking77Classification": 86.01, "EmotionClassification": 51.53, "ImdbClassification": 95.7, "MTOPDomainClassification (en)": 98.02, "MTOPIntentClassification (en)": 77.82, "MassiveIntentClassification (en)": 75.67, "MassiveScenarioClassification (en)": 85.16, "ToxicConversationsClassification": 88.33, "TweetSentimentExtractionClassification": 72.97 } ] }, "Clustering": { "v_measure": [ { "Model": "google-gecko-256.text-embedding-preview-0409", "ArxivClusteringP2P": 44.12, "ArxivClusteringS2S": 36.54, "BiorxivClusteringP2P": 36.28, "BiorxivClusteringS2S": 33.09, "MedrxivClusteringP2P": 32.08, "MedrxivClusteringS2S": 30.84, "RedditClustering": 62.24, "RedditClusteringP2P": 63.7, "StackExchangeClustering": 70.19, "StackExchangeClusteringP2P": 36.1, "TwentyNewsgroupsClustering": 50.6 } ] }, "PairClassification": { "ap": [ { "Model": "google-gecko-256.text-embedding-preview-0409", "SprintDuplicateQuestions": 96.49, "TwitterSemEval2015": 78.23, "TwitterURLCorpus": 87.04 } ] }, "Reranking": { "map": [ { "Model": "google-gecko-256.text-embedding-preview-0409", "AskUbuntuDupQuestions": 63.84, "MindSmallReranking": 31.89, "SciDocsRR": 81.62, "StackOverflowDupQuestions": 53.76 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "google-gecko-256.text-embedding-preview-0409", "ArguAna": 56.27, "CQADupstackRetrieval": 45.41, "ClimateFEVER": 29.35, "DBPedia": 41.91, "FEVER": 82.61, "FiQA2018": 55.54, "HotpotQA": 64.65, "MSMARCO": 31.12, "NFCorpus": 37.81, "NQ": 57.37, "QuoraRetrieval": 87.89, "SCIDOCS": 18.21, "SciFact": 70.86, "TRECCOVID": 80.13, "Touche2020": 27.4 } ] }, "STS": { "spearman": [ { "Model": "google-gecko-256.text-embedding-preview-0409", "BIOSSES": 89.42, "SICK-R": 81.67, "STS12": 78.02, "STS13": 90.1, "STS14": 85.44, "STS15": 89.64, "STS16": 87.24, "STS17 (en-en)": 90.46, "STS22 (en)": 67.99, "STSBenchmark": 89.33 } ] }, "Summarization": { "spearman": [ { "Model": "google-gecko-256.text-embedding-preview-0409", "SummEval": 32.36 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "google-gecko-256.text-embedding-preview-0409" } ] } }, "FollowIR-7B": { "BitextMining": { "f1": [ { "Model": "FollowIR-7B" } ] }, "Classification": { "accuracy": [ { "Model": "FollowIR-7B" } ] }, "Clustering": { "v_measure": [ { "Model": "FollowIR-7B" } ] }, "PairClassification": { "ap": [ { "Model": "FollowIR-7B" } ] }, "Reranking": { "map": [ { "Model": "FollowIR-7B" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "FollowIR-7B" } ] }, "STS": { "spearman": [ { "Model": "FollowIR-7B" } ] }, "Summarization": { "spearman": [ { "Model": "FollowIR-7B" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "FollowIR-7B", "Core17InstructionRetrieval": 16.48, "News21InstructionRetrieval": 6.26, "Robust04InstructionRetrieval": 13.72 } ] } }, "bert-base-uncased": { "BitextMining": { "f1": [ { "Model": "bert-base-uncased" } ] }, "Classification": { "accuracy": [ { "Model": "bert-base-uncased", "AmazonCounterfactualClassification (en)": 74.25, "AmazonPolarityClassification": 71.33, "AmazonReviewsClassification (en)": 33.56, "Banking77Classification": 63.41, "EmotionClassification": 35.28, "ImdbClassification": 65.35, "MTOPDomainClassification (en)": 82.63, "MTOPIntentClassification (en)": 68.14, "MassiveIntentClassification (en)": 59.88, "MassiveScenarioClassification (en)": 64.28, "ToxicConversationsClassification": 70.0, 
"TweetSentimentExtractionClassification": 51.81 } ] }, "Clustering": { "v_measure": [ { "Model": "bert-base-uncased", "ArxivClusteringP2P": 35.19, "ArxivClusteringS2S": 27.51, "BiorxivClusteringP2P": 30.12, "BiorxivClusteringS2S": 24.77, "MedrxivClusteringP2P": 26.09, "MedrxivClusteringS2S": 23.6, "RedditClustering": 27.24, "RedditClusteringP2P": 43.32, "StackExchangeClustering": 43.58, "StackExchangeClusteringP2P": 26.55, "TwentyNewsgroupsClustering": 23.35 } ] }, "PairClassification": { "ap": [ { "Model": "bert-base-uncased", "SprintDuplicateQuestions": 36.81, "TwitterSemEval2015": 55.9, "TwitterURLCorpus": 76.29 } ] }, "Reranking": { "map": [ { "Model": "bert-base-uncased", "AskUbuntuDupQuestions": 45.84, "MindSmallReranking": 28.37, "SciDocsRR": 64.94, "StackOverflowDupQuestions": 34.62 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "bert-base-uncased", "ArguAna": 28.29, "CQADupstackRetrieval": 5.51, "ClimateFEVER": 5.41, "DBPedia": 4.13, "FEVER": 3.3, "FiQA2018": 2.19, "HotpotQA": 8.26, "MSMARCO": 1.91, "NFCorpus": 4.3, "NQ": 2.62, "QuoraRetrieval": 61.03, "SCIDOCS": 2.82, "SciFact": 13.34, "TRECCOVID": 14.74, "Touche2020": 0.97 } ] }, "STS": { "spearman": [ { "Model": "bert-base-uncased", "BIOSSES": 54.7, "SICK-R": 58.65, "STS12": 30.87, "STS13": 59.89, "STS14": 47.73, "STS15": 60.29, "STS16": 63.73, "STS17 (en-en)": 64.1, "STS22 (en)": 56.37, "STSBenchmark": 47.29 } ] }, "Summarization": { "spearman": [ { "Model": "bert-base-uncased", "SummEval": 29.82 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "bert-base-uncased" } ] } }, "xlm-roberta-base": { "BitextMining": { "f1": [ { "Model": "xlm-roberta-base", "BornholmBitextMining": 4.42 } ] }, "Classification": { "accuracy": [ { "Model": "xlm-roberta-base", "AmazonReviewsClassification (fr)": 26.75, "AngryTweetsClassification": 52.41, "DKHateClassification": 56.78, "DanishPoliticalCommentsClassification": 34.03, "LccSentimentClassification": 52.27, "MTOPDomainClassification (fr)": 43.83, "MTOPIntentClassification (fr)": 19.38, "MasakhaNEWSClassification (fra)": 60.5, "MassiveIntentClassification (da)": 41.06, "MassiveIntentClassification (nb)": 40.46, "MassiveIntentClassification (sv)": 45.12, "MassiveIntentClassification (fr)": 13.58, "MassiveScenarioClassification (da)": 43.91, "MassiveScenarioClassification (nb)": 44.83, "MassiveScenarioClassification (sv)": 47.35, "MassiveScenarioClassification (fr)": 23.21, "NoRecClassification": 46.28, "NordicLangClassification": 79.39, "NorwegianParliament": 56.75, "ScalaDaClassification": 57.3, "ScalaNbClassification": 58.33 } ] }, "Clustering": { "v_measure": [ { "Model": "xlm-roberta-base", "AlloProfClusteringP2P": 52.24, "AlloProfClusteringS2S": 20.37, "HALClusteringS2S": 8.68, "MLSUMClusteringP2P": 40.44, "MLSUMClusteringS2S": 24.14, "MasakhaNEWSClusteringP2P (fra)": 29.29, "MasakhaNEWSClusteringS2S (fra)": 23.76 } ] }, "PairClassification": { "ap": [ { "Model": "xlm-roberta-base", "OpusparcusPC (fr)": 85.45, "PawsXPairClassification (fr)": 51.35 } ] }, "Reranking": { "map": [ { "Model": "xlm-roberta-base", "AlloprofReranking": 25.58, "SyntecReranking": 43.75 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "xlm-roberta-base", "AlloprofRetrieval": 0.16, "BSARDRetrieval": 0.0, "MintakaRetrieval (fr)": 0.88, "SyntecRetrieval": 3.33, "XPQARetrieval (fr)": 11.65 } ] }, "STS": { "spearman": [ { "Model": "xlm-roberta-base", "SICKFr": 48.62, "STS22 (fr)": 56.72, "STSBenchmarkMultilingualSTS (fr)": 46.23 } ] }, "Summarization": { "spearman": [ { "Model": "xlm-roberta-base", "SummEvalFr": 
29.14 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "xlm-roberta-base" } ] } }, "text-embedding-3-large-256": { "BitextMining": { "f1": [ { "Model": "text-embedding-3-large-256" } ] }, "Classification": { "accuracy": [ { "Model": "text-embedding-3-large-256", "AmazonCounterfactualClassification (en)": 73.96, "AmazonPolarityClassification": 91.32, "AmazonReviewsClassification (en)": 46.03, "Banking77Classification": 83.19, "EmotionClassification": 45.8, "ImdbClassification": 85.93, "MTOPDomainClassification (en)": 92.76, "MTOPIntentClassification (en)": 70.45, "MassiveIntentClassification (en)": 71.12, "MassiveScenarioClassification (en)": 75.56, "ToxicConversationsClassification": 68.52, "TweetSentimentExtractionClassification": 58.98 } ] }, "Clustering": { "v_measure": [ { "Model": "text-embedding-3-large-256", "ArxivClusteringP2P": 47.05, "ArxivClusteringS2S": 42.59, "BiorxivClusteringP2P": 35.43, "BiorxivClusteringS2S": 33.86, "MedrxivClusteringP2P": 32.1, "MedrxivClusteringS2S": 31.15, "RedditClustering": 60.18, "RedditClusteringP2P": 64.71, "StackExchangeClustering": 71.23, "StackExchangeClusteringP2P": 35.95, "TwentyNewsgroupsClustering": 54.24 } ] }, "PairClassification": { "ap": [ { "Model": "text-embedding-3-large-256", "SprintDuplicateQuestions": 89.02, "TwitterSemEval2015": 76.56, "TwitterURLCorpus": 87.09 } ] }, "Reranking": { "map": [ { "Model": "text-embedding-3-large-256", "AskUbuntuDupQuestions": 64.61, "MindSmallReranking": 29.63, "SciDocsRR": 84.25, "StackOverflowDupQuestions": 53.46 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "text-embedding-3-large-256", "ArguAna": 55.6, "CQADupstackRetrieval": 42.28, "ClimateFEVER": 25.8, "DBPedia": 40.8, "FEVER": 84.57, "FiQA2018": 50.33, "HotpotQA": 62.69, "MSMARCO": 37.93, "NFCorpus": 37.94, "NQ": 56.64, "QuoraRetrieval": 88.22, "SCIDOCS": 20.44, "SciFact": 73.1, "TRECCOVID": 76.24, "Touche2020": 22.31 } ] }, "STS": { "spearman": [ { "Model": "text-embedding-3-large-256", "BIOSSES": 84.87, "SICK-R": 79.18, "STS12": 71.98, "STS13": 85.52, "STS14": 80.5, "STS15": 87.51, "STS16": 84.48, "STS17 (en-en)": 88.11, "STS22 (en)": 65.92, "STSBenchmark": 82.34 } ] }, "Summarization": { "spearman": [ { "Model": "text-embedding-3-large-256", "SummEval": 29.92 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "text-embedding-3-large-256" } ] } }, "LaBSE-en-ru": { "BitextMining": { "f1": [ { "Model": "LaBSE-en-ru", "Tatoeba (rus-Cyrl_eng-Latn)": 93.62 } ] }, "Classification": { "accuracy": [ { "Model": "LaBSE-en-ru", "GeoreviewClassification (rus-Cyrl)": 40.89, "HeadlineClassification (rus-Cyrl)": 68.75, "InappropriatenessClassification (rus-Cyrl)": 58.48, "KinopoiskClassification (rus-Cyrl)": 49.85, "MassiveIntentClassification (swa-Latn)": 19.98, "MassiveIntentClassification (aze-Latn)": 19.52, "MassiveIntentClassification (tur-Latn)": 24.12, "MassiveIntentClassification (cmo-Hans)": 3.96, "MassiveIntentClassification (amh-Ethi)": 2.76, "MassiveIntentClassification (kan-Knda)": 2.86, "MassiveIntentClassification (hin-Deva)": 3.29, "MassiveIntentClassification (tgl-Latn)": 27.08, "MassiveIntentClassification (tha-Thai)": 4.0, "MassiveIntentClassification (swe-Latn)": 32.01, "MassiveIntentClassification (deu-Latn)": 35.14, "MassiveIntentClassification (spa-Latn)": 37.67, "MassiveIntentClassification (por-Latn)": 39.84, "MassiveIntentClassification (jpn-Jpan)": 4.78, "MassiveIntentClassification (fin-Latn)": 31.11, "MassiveIntentClassification (kat-Geor)": 2.87, "MassiveIntentClassification (slv-Latn)": 35.66, 
"MassiveIntentClassification (rus-Cyrl)": 60.53, "MassiveIntentClassification (ita-Latn)": 43.32, "MassiveIntentClassification (tel-Telu)": 2.72, "MassiveIntentClassification (afr-Latn)": 30.59, "MassiveIntentClassification (isl-Latn)": 25.61, "MassiveIntentClassification (fas-Arab)": 3.71, "MassiveIntentClassification (vie-Latn)": 23.0, "MassiveIntentClassification (ben-Beng)": 3.35, "MassiveIntentClassification (hye-Armn)": 2.8, "MassiveIntentClassification (pol-Latn)": 31.3, "MassiveIntentClassification (cym-Latn)": 26.59, "MassiveIntentClassification (jav-Latn)": 26.84, "MassiveIntentClassification (mon-Cyrl)": 35.97, "MassiveIntentClassification (en)": 60.48, "MassiveIntentClassification (msa-Latn)": 27.82, "MassiveIntentClassification (nob-Latn)": 35.78, "MassiveIntentClassification (heb-Hebr)": 2.33, "MassiveIntentClassification (khm-Khmr)": 4.6, "MassiveIntentClassification (nld-Latn)": 34.66, "MassiveIntentClassification (ind-Latn)": 33.31, "MassiveIntentClassification (mal-Mlym)": 2.63, "MassiveIntentClassification (tam-Taml)": 2.22, "MassiveIntentClassification (mya-Mymr)": 3.57, "MassiveIntentClassification (urd-Arab)": 3.36, "MassiveIntentClassification (dan-Latn)": 38.66, "MassiveIntentClassification (cmo-Hant)": 5.29, "MassiveIntentClassification (ron-Latn)": 37.45, "MassiveIntentClassification (lav-Latn)": 23.92, "MassiveIntentClassification (fra-Latn)": 40.29, "MassiveIntentClassification (ell-Grek)": 11.14, "MassiveIntentClassification (sqi-Latn)": 35.84, "MassiveIntentClassification (hun-Latn)": 26.74, "MassiveIntentClassification (kor-Kore)": 2.69, "MassiveIntentClassification (ara-Arab)": 5.19, "MassiveScenarioClassification (swa-Latn)": 25.61, "MassiveScenarioClassification (aze-Latn)": 24.48, "MassiveScenarioClassification (tur-Latn)": 31.38, "MassiveScenarioClassification (cmo-Hans)": 9.98, "MassiveScenarioClassification (amh-Ethi)": 7.59, "MassiveScenarioClassification (kan-Knda)": 8.73, "MassiveScenarioClassification (hin-Deva)": 8.77, "MassiveScenarioClassification (tgl-Latn)": 35.12, "MassiveScenarioClassification (tha-Thai)": 8.69, "MassiveScenarioClassification (swe-Latn)": 35.83, "MassiveScenarioClassification (deu-Latn)": 41.72, "MassiveScenarioClassification (spa-Latn)": 43.33, "MassiveScenarioClassification (por-Latn)": 44.62, "MassiveScenarioClassification (jpn-Jpan)": 9.51, "MassiveScenarioClassification (fin-Latn)": 33.79, "MassiveScenarioClassification (kat-Geor)": 7.32, "MassiveScenarioClassification (slv-Latn)": 37.6, "MassiveScenarioClassification (rus-Cyrl)": 65.15, "MassiveScenarioClassification (ita-Latn)": 47.28, "MassiveScenarioClassification (tel-Telu)": 7.53, "MassiveScenarioClassification (afr-Latn)": 37.27, "MassiveScenarioClassification (isl-Latn)": 30.32, "MassiveScenarioClassification (fas-Arab)": 6.83, "MassiveScenarioClassification (vie-Latn)": 28.92, "MassiveScenarioClassification (ben-Beng)": 8.57, "MassiveScenarioClassification (hye-Armn)": 8.91, "MassiveScenarioClassification (pol-Latn)": 33.75, "MassiveScenarioClassification (cym-Latn)": 30.38, "MassiveScenarioClassification (jav-Latn)": 33.94, "MassiveScenarioClassification (mon-Cyrl)": 41.53, "MassiveScenarioClassification (en)": 65.43, "MassiveScenarioClassification (msa-Latn)": 36.28, "MassiveScenarioClassification (nob-Latn)": 42.43, "MassiveScenarioClassification (heb-Hebr)": 8.64, "MassiveScenarioClassification (khm-Khmr)": 9.99, "MassiveScenarioClassification (nld-Latn)": 41.47, "MassiveScenarioClassification (ind-Latn)": 39.05, "MassiveScenarioClassification (mal-Mlym)": 
7.24, "MassiveScenarioClassification (tam-Taml)": 7.71, "MassiveScenarioClassification (mya-Mymr)": 9.94, "MassiveScenarioClassification (urd-Arab)": 9.16, "MassiveScenarioClassification (dan-Latn)": 44.69, "MassiveScenarioClassification (cmo-Hant)": 10.48, "MassiveScenarioClassification (ron-Latn)": 44.55, "MassiveScenarioClassification (lav-Latn)": 26.26, "MassiveScenarioClassification (fra-Latn)": 45.08, "MassiveScenarioClassification (ell-Grek)": 19.46, "MassiveScenarioClassification (sqi-Latn)": 40.9, "MassiveScenarioClassification (hun-Latn)": 33.92, "MassiveScenarioClassification (kor-Kore)": 7.37, "MassiveScenarioClassification (ara-Arab)": 12.43, "RuReviewsClassification (rus-Cyrl)": 58.01, "RuSciBenchGRNTIClassification (rus-Cyrl)": 52.8, "RuSciBenchOECDClassification (rus-Cyrl)": 40.36 } ] }, "Clustering": { "v_measure": [ { "Model": "LaBSE-en-ru", "GeoreviewClusteringP2P (rus-Cyrl)": 51.89, "MLSUMClusteringP2P (rus-Cyrl)": 37.87, "MLSUMClusteringS2S (rus-Cyrl)": 41.24, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 47.48, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.16 } ] }, "PairClassification": { "ap": [ { "Model": "LaBSE-en-ru", "OpusparcusPC (rus-Cyrl)": 87.18, "TERRa (rus-Cyrl)": 55.61 } ] }, "Reranking": { "map": [ { "Model": "LaBSE-en-ru", "RuBQReranking (rus-Cyrl)": 54.83 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "LaBSE-en-ru", "RiaNewsRetrieval (rus-Cyrl)": 34.73, "RuBQRetrieval (rus-Cyrl)": 29.03 } ] }, "STS": { "spearman": [ { "Model": "LaBSE-en-ru", "RUParaPhraserSTS (rus-Cyrl)": 65.87, "RuSTSBenchmarkSTS (rus-Cyrl)": 73.32, "STS22 (deu-Latn)": 38.9, "STS22 (en)": 59.47, "STS22 (pol-Latn_eng-Latn)": 58.73, "STS22 (spa-Latn)": 60.85, "STS22 (fra-Latn)": 74.98, "STS22 (deu-Latn_eng-Latn)": 47.98, "STS22 (deu-Latn_fra-Latn)": 59.4, "STS22 (deu-Latn_pol-Latn)": 39.48, "STS22 (pol-Latn)": 32.74, "STS22 (tur-Latn)": 55.04, "STS22 (spa-Latn_eng-Latn)": 70.8, "STS22 (rus-Cyrl)": 58.53, "STS22 (ita-Latn)": 68.58, "STS22 (fra-Latn_pol-Latn)": 61.98, "STS22 (spa-Latn_ita-Latn)": 66.83, "STS22 (cmn-Hans_eng-Latn)": 24.98, "STS22 (ara-Arab)": 31.85, "STS22 (cmn-Hans)": 35.1, "STSBenchmarkMultilingualSTS (rus-Cyrl)": 73.02 } ] }, "Summarization": { "spearman": [ { "Model": "LaBSE-en-ru" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "LaBSE-en-ru" } ] } }, "gelectra-base": { "BitextMining": { "f1": [ { "Model": "gelectra-base" } ] }, "Classification": { "accuracy": [ { "Model": "gelectra-base" } ] }, "Clustering": { "v_measure": [ { "Model": "gelectra-base", "BlurbsClusteringP2P": 10.06, "BlurbsClusteringS2S": 7.74, "TenKGnadClusteringP2P": 9.02, "TenKGnadClusteringS2S": 4.11 } ] }, "PairClassification": { "ap": [ { "Model": "gelectra-base" } ] }, "Reranking": { "map": [ { "Model": "gelectra-base" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "gelectra-base" } ] }, "STS": { "spearman": [ { "Model": "gelectra-base" } ] }, "Summarization": { "spearman": [ { "Model": "gelectra-base" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "gelectra-base" } ] } }, "OpenSearch-text-hybrid": { "BitextMining": { "f1": [ { "Model": "OpenSearch-text-hybrid" } ] }, "Classification": { "accuracy": [ { "Model": "OpenSearch-text-hybrid", "AmazonReviewsClassification (zh)": 46.18, "IFlyTek": 51.8, "JDReview": 86.02, "MassiveIntentClassification (zh-CN)": 73.85, "MassiveScenarioClassification (zh-CN)": 77.13, "MultilingualSentiment": 76.35, "OnlineShopping": 93.2, "TNews": 53.06, "Waimai": 88.1 } ] }, "Clustering": { "v_measure": [ { "Model": "OpenSearch-text-hybrid", 
"CLSClusteringP2P": 41.64, "CLSClusteringS2S": 40.33, "ThuNewsClusteringP2P": 69.28, "ThuNewsClusteringS2S": 63.75 } ] }, "PairClassification": { "ap": [ { "Model": "OpenSearch-text-hybrid", "Cmnli": 90.77, "Ocnli": 85.44 } ] }, "Reranking": { "map": [ { "Model": "OpenSearch-text-hybrid", "CMedQAv1": 88.99, "CMedQAv2": 89.6, "MMarcoReranking": 28.12, "T2Reranking": 66.38 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "OpenSearch-text-hybrid", "CmedqaRetrieval": 46.56, "CovidRetrieval": 84.03, "DuRetrieval": 87.85, "EcomRetrieval": 68.79, "MMarcoRetrieval": 79.93, "MedicalRetrieval": 65.92, "T2Retrieval": 86.76, "VideoRetrieval": 75.43 } ] }, "STS": { "spearman": [ { "Model": "OpenSearch-text-hybrid", "AFQMC": 59.11, "ATEC": 58.19, "BQ": 71.07, "LCQMC": 78.27, "PAWSX": 44.98, "QBQTC": 38.69, "STS22 (zh)": 66.53, "STSB": 82.8 } ] }, "Summarization": { "spearman": [ { "Model": "OpenSearch-text-hybrid" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "OpenSearch-text-hybrid" } ] } }, "gbert-large": { "BitextMining": { "f1": [ { "Model": "gbert-large" } ] }, "Classification": { "accuracy": [ { "Model": "gbert-large" } ] }, "Clustering": { "v_measure": [ { "Model": "gbert-large", "BlurbsClusteringP2P": 39.3, "BlurbsClusteringS2S": 13.38, "TenKGnadClusteringP2P": 41.69, "TenKGnadClusteringS2S": 34.97 } ] }, "PairClassification": { "ap": [ { "Model": "gbert-large" } ] }, "Reranking": { "map": [ { "Model": "gbert-large" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "gbert-large" } ] }, "STS": { "spearman": [ { "Model": "gbert-large" } ] }, "Summarization": { "spearman": [ { "Model": "gbert-large" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "gbert-large" } ] } }, "msmarco-bert-co-condensor": { "BitextMining": { "f1": [ { "Model": "msmarco-bert-co-condensor" } ] }, "Classification": { "accuracy": [ { "Model": "msmarco-bert-co-condensor", "AmazonCounterfactualClassification (en)": 64.06, "AmazonPolarityClassification": 66.88, "AmazonReviewsClassification (en)": 34.85, "Banking77Classification": 82.35, "EmotionClassification": 41.91, "ImdbClassification": 60.17, "MTOPDomainClassification (en)": 91.34, "MTOPIntentClassification (en)": 71.07, "MassiveIntentClassification (en)": 70.4, "MassiveScenarioClassification (en)": 73.73, "ToxicConversationsClassification": 64.01, "TweetSentimentExtractionClassification": 55.74 } ] }, "Clustering": { "v_measure": [ { "Model": "msmarco-bert-co-condensor", "ArxivClusteringP2P": 36.94, "ArxivClusteringS2S": 29.03, "BiorxivClusteringP2P": 32.35, "BiorxivClusteringS2S": 28.16, "MedrxivClusteringP2P": 30.23, "MedrxivClusteringS2S": 27.01, "RedditClustering": 48.04, "RedditClusteringP2P": 53.53, "StackExchangeClustering": 59.54, "StackExchangeClusteringP2P": 30.48, "TwentyNewsgroupsClustering": 38.68 } ] }, "PairClassification": { "ap": [ { "Model": "msmarco-bert-co-condensor", "SprintDuplicateQuestions": 96.09, "TwitterSemEval2015": 65.95, "TwitterURLCorpus": 83.17 } ] }, "Reranking": { "map": [ { "Model": "msmarco-bert-co-condensor", "AskUbuntuDupQuestions": 58.99, "MindSmallReranking": 27.13, "SciDocsRR": 72.78, "StackOverflowDupQuestions": 48.48 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "msmarco-bert-co-condensor", "ArguAna": 45.15, "CQADupstackRetrieval": 27.72, "ClimateFEVER": 16.96, "DBPedia": 27.86, "FEVER": 45.68, "FiQA2018": 15.62, "HotpotQA": 35.61, "MSMARCO": 29.57, "NFCorpus": 22.29, "NQ": 29.85, "QuoraRetrieval": 86.51, "SCIDOCS": 10.13, "SciFact": 52.31, "TRECCOVID": 40.54, "Touche2020": 8.57 } ] }, "STS": { "spearman": [ { 
"Model": "msmarco-bert-co-condensor", "BIOSSES": 77.32, "SICK-R": 72.0, "STS12": 68.19, "STS13": 80.4, "STS14": 74.02, "STS15": 82.57, "STS16": 79.78, "STS17 (en-en)": 85.94, "STS22 (en)": 67.54, "STSBenchmark": 76.97 } ] }, "Summarization": { "spearman": [ { "Model": "msmarco-bert-co-condensor", "SummEval": 29.5 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "msmarco-bert-co-condensor" } ] } }, "paraphrase-multilingual-mpnet-base-v2": { "BitextMining": { "f1": [ { "Model": "paraphrase-multilingual-mpnet-base-v2", "BUCC (de-en)": 98.59, "BUCC (fr-en)": 96.89, "BUCC (ru-en)": 96.44, "BUCC (zh-en)": 97.56, "BornholmBitextMining (dan-Latn)": 18.18, "Tatoeba (afr-eng)": 72.96, "Tatoeba (amh-eng)": 53.49, "Tatoeba (ang-eng)": 16.72, "Tatoeba (ara-eng)": 90.19, "Tatoeba (arq-eng)": 19.84, "Tatoeba (arz-eng)": 55.69, "Tatoeba (ast-eng)": 70.08, "Tatoeba (awa-eng)": 42.83, "Tatoeba (aze-eng)": 76.36, "Tatoeba (bel-eng)": 79.94, "Tatoeba (ben-eng)": 64.9, "Tatoeba (ber-eng)": 4.88, "Tatoeba (bos-eng)": 94.02, "Tatoeba (bre-eng)": 6.42, "Tatoeba (bul-eng)": 93.52, "Tatoeba (cat-eng)": 96.05, "Tatoeba (cbk-eng)": 58.68, "Tatoeba (ceb-eng)": 7.39, "Tatoeba (ces-eng)": 95.73, "Tatoeba (cha-eng)": 12.59, "Tatoeba (cmn-eng)": 95.83, "Tatoeba (cor-eng)": 3.53, "Tatoeba (csb-eng)": 23.73, "Tatoeba (cym-eng)": 22.31, "Tatoeba (dan-eng)": 96.17, "Tatoeba (deu-eng)": 97.73, "Tatoeba (dsb-eng)": 36.85, "Tatoeba (dtp-eng)": 5.03, "Tatoeba (ell-eng)": 94.93, "Tatoeba (epo-eng)": 55.12, "Tatoeba (est-eng)": 98.4, "Tatoeba (eus-eng)": 31.33, "Tatoeba (fao-eng)": 38.24, "Tatoeba (fin-eng)": 95.92, "Tatoeba (fra-eng)": 93.12, "Tatoeba (fry-eng)": 43.54, "Tatoeba (gla-eng)": 4.72, "Tatoeba (gle-eng)": 16.85, "Tatoeba (glg-eng)": 95.32, "Tatoeba (gsw-eng)": 25.12, "Tatoeba (heb-eng)": 88.26, "Tatoeba (hin-eng)": 97.75, "Tatoeba (hrv-eng)": 97.0, "Tatoeba (hsb-eng)": 44.32, "Tatoeba (hun-eng)": 94.18, "Tatoeba (hye-eng)": 94.38, "Tatoeba (ido-eng)": 43.91, "Tatoeba (ile-eng)": 60.36, "Tatoeba (ina-eng)": 84.32, "Tatoeba (ind-eng)": 93.5, "Tatoeba (isl-eng)": 59.25, "Tatoeba (ita-eng)": 93.76, "Tatoeba (jav-eng)": 23.39, "Tatoeba (jpn-eng)": 92.51, "Tatoeba (kab-eng)": 1.41, "Tatoeba (kat-eng)": 95.46, "Tatoeba (kaz-eng)": 61.49, "Tatoeba (khm-eng)": 58.8, "Tatoeba (kor-eng)": 93.07, "Tatoeba (kur-eng)": 61.44, "Tatoeba (kzj-eng)": 5.88, "Tatoeba (lat-eng)": 24.25, "Tatoeba (lfn-eng)": 49.56, "Tatoeba (lit-eng)": 95.37, "Tatoeba (lvs-eng)": 97.53, "Tatoeba (mal-eng)": 88.46, "Tatoeba (mar-eng)": 93.83, "Tatoeba (max-eng)": 48.77, "Tatoeba (mhr-eng)": 7.57, "Tatoeba (mkd-eng)": 93.02, "Tatoeba (mon-eng)": 96.14, "Tatoeba (nds-eng)": 38.88, "Tatoeba (nld-eng)": 95.5, "Tatoeba (nno-eng)": 81.41, "Tatoeba (nob-eng)": 98.53, "Tatoeba (nov-eng)": 50.23, "Tatoeba (oci-eng)": 43.49, "Tatoeba (orv-eng)": 23.77, "Tatoeba (pam-eng)": 5.39, "Tatoeba (pes-eng)": 93.47, "Tatoeba (pms-eng)": 34.19, "Tatoeba (pol-eng)": 96.95, "Tatoeba (por-eng)": 93.02, "Tatoeba (ron-eng)": 96.43, "Tatoeba (rus-eng)": 92.92, "Tatoeba (slk-eng)": 96.62, "Tatoeba (slv-eng)": 97.08, "Tatoeba (spa-eng)": 97.0, "Tatoeba (sqi-eng)": 98.57, "Tatoeba (srp-eng)": 94.12, "Tatoeba (swe-eng)": 95.45, "Tatoeba (swg-eng)": 22.8, "Tatoeba (swh-eng)": 16.02, "Tatoeba (tam-eng)": 73.6, "Tatoeba (tat-eng)": 10.89, "Tatoeba (tel-eng)": 79.73, "Tatoeba (tgl-eng)": 17.67, "Tatoeba (tha-eng)": 95.99, "Tatoeba (tuk-eng)": 14.91, "Tatoeba (tur-eng)": 96.17, "Tatoeba (tzl-eng)": 34.21, "Tatoeba (uig-eng)": 48.35, "Tatoeba (ukr-eng)": 92.67, "Tatoeba (urd-eng)": 95.12, 
"Tatoeba (uzb-eng)": 23.19, "Tatoeba (vie-eng)": 97.23, "Tatoeba (war-eng)": 7.42, "Tatoeba (wuu-eng)": 78.25, "Tatoeba (xho-eng)": 6.53, "Tatoeba (yid-eng)": 30.73, "Tatoeba (yue-eng)": 77.58, "Tatoeba (zsm-eng)": 95.8, "Tatoeba (gsw-Latn_eng-Latn)": 25.12, "Tatoeba (spa-Latn_eng-Latn)": 97.0, "Tatoeba (lat-Latn_eng-Latn)": 24.25, "Tatoeba (hun-Latn_eng-Latn)": 94.18, "Tatoeba (eus-Latn_eng-Latn)": 31.33, "Tatoeba (heb-Hebr_eng-Latn)": 88.26, "Tatoeba (ang-Latn_eng-Latn)": 16.72, "Tatoeba (swe-Latn_eng-Latn)": 95.45, "Tatoeba (slk-Latn_eng-Latn)": 96.62, "Tatoeba (ell-Grek_eng-Latn)": 94.93, "Tatoeba (nld-Latn_eng-Latn)": 95.5, "Tatoeba (cym-Latn_eng-Latn)": 22.31, "Tatoeba (sqi-Latn_eng-Latn)": 98.57, "Tatoeba (csb-Latn_eng-Latn)": 23.73, "Tatoeba (ben-Beng_eng-Latn)": 64.9, "Tatoeba (bre-Latn_eng-Latn)": 6.42, "Tatoeba (mkd-Cyrl_eng-Latn)": 93.02, "Tatoeba (cmn-Hans_eng-Latn)": 95.83, "Tatoeba (deu-Latn_eng-Latn)": 97.73, "Tatoeba (fao-Latn_eng-Latn)": 38.24, "Tatoeba (afr-Latn_eng-Latn)": 72.96, "Tatoeba (nno-Latn_eng-Latn)": 81.41, "Tatoeba (jpn-Jpan_eng-Latn)": 92.51, "Tatoeba (tzl-Latn_eng-Latn)": 34.21, "Tatoeba (arz-Arab_eng-Latn)": 55.69, "Tatoeba (ita-Latn_eng-Latn)": 93.76, "Tatoeba (arq-Arab_eng-Latn)": 19.84, "Tatoeba (uzb-Latn_eng-Latn)": 23.19, "Tatoeba (rus-Cyrl_eng-Latn)": 92.92, "Tatoeba (tat-Cyrl_eng-Latn)": 10.89, "Tatoeba (fin-Latn_eng-Latn)": 95.92, "Tatoeba (nob-Latn_eng-Latn)": 98.53, "Tatoeba (tam-Taml_eng-Latn)": 73.6, "Tatoeba (kur-Latn_eng-Latn)": 61.44, "Tatoeba (wuu-Hans_eng-Latn)": 78.25, "Tatoeba (cor-Latn_eng-Latn)": 3.53, "Tatoeba (cha-Latn_eng-Latn)": 12.59, "Tatoeba (hsb-Latn_eng-Latn)": 44.32, "Tatoeba (max-Deva_eng-Latn)": 48.77, "Tatoeba (kat-Geor_eng-Latn)": 95.46, "Tatoeba (mal-Mlym_eng-Latn)": 88.46, "Tatoeba (ina-Latn_eng-Latn)": 84.32, "Tatoeba (cbk-Latn_eng-Latn)": 58.68, "Tatoeba (yid-Hebr_eng-Latn)": 30.73, "Tatoeba (swg-Latn_eng-Latn)": 22.8, "Tatoeba (dtp-Latn_eng-Latn)": 5.03, "Tatoeba (ber-Tfng_eng-Latn)": 4.88, "Tatoeba (epo-Latn_eng-Latn)": 55.12, "Tatoeba (mar-Deva_eng-Latn)": 93.83, "Tatoeba (kaz-Cyrl_eng-Latn)": 61.49, "Tatoeba (tgl-Latn_eng-Latn)": 17.67, "Tatoeba (hrv-Latn_eng-Latn)": 97.0, "Tatoeba (bel-Cyrl_eng-Latn)": 79.94, "Tatoeba (pam-Latn_eng-Latn)": 5.39, "Tatoeba (zsm-Latn_eng-Latn)": 95.8, "Tatoeba (ces-Latn_eng-Latn)": 95.73, "Tatoeba (gla-Latn_eng-Latn)": 4.72, "Tatoeba (hin-Deva_eng-Latn)": 97.75, "Tatoeba (slv-Latn_eng-Latn)": 97.08, "Tatoeba (cat-Latn_eng-Latn)": 96.05, "Tatoeba (war-Latn_eng-Latn)": 7.42, "Tatoeba (hye-Armn_eng-Latn)": 94.38, "Tatoeba (ind-Latn_eng-Latn)": 93.5, "Tatoeba (kor-Hang_eng-Latn)": 93.07, "Tatoeba (por-Latn_eng-Latn)": 93.02, "Tatoeba (fry-Latn_eng-Latn)": 43.54, "Tatoeba (dan-Latn_eng-Latn)": 96.17, "Tatoeba (nov-Latn_eng-Latn)": 50.23, "Tatoeba (vie-Latn_eng-Latn)": 97.23, "Tatoeba (kzj-Latn_eng-Latn)": 5.88, "Tatoeba (ido-Latn_eng-Latn)": 43.91, "Tatoeba (tuk-Latn_eng-Latn)": 14.91, "Tatoeba (glg-Latn_eng-Latn)": 95.32, "Tatoeba (bos-Latn_eng-Latn)": 94.02, "Tatoeba (gle-Latn_eng-Latn)": 16.85, "Tatoeba (fra-Latn_eng-Latn)": 93.12, "Tatoeba (lvs-Latn_eng-Latn)": 97.53, "Tatoeba (mon-Cyrl_eng-Latn)": 96.14, "Tatoeba (lit-Latn_eng-Latn)": 95.37, "Tatoeba (ron-Latn_eng-Latn)": 96.43, "Tatoeba (pms-Latn_eng-Latn)": 34.19, "Tatoeba (lfn-Latn_eng-Latn)": 49.56, "Tatoeba (isl-Latn_eng-Latn)": 59.25, "Tatoeba (xho-Latn_eng-Latn)": 6.53, "Tatoeba (orv-Cyrl_eng-Latn)": 23.77, "Tatoeba (ukr-Cyrl_eng-Latn)": 92.67, "Tatoeba (dsb-Latn_eng-Latn)": 36.85, "Tatoeba (nds-Latn_eng-Latn)": 38.88, 
"Tatoeba (amh-Ethi_eng-Latn)": 53.49, "Tatoeba (yue-Hant_eng-Latn)": 77.58, "Tatoeba (urd-Arab_eng-Latn)": 95.12, "Tatoeba (tel-Telu_eng-Latn)": 79.73, "Tatoeba (ile-Latn_eng-Latn)": 60.36, "Tatoeba (jav-Latn_eng-Latn)": 23.39, "Tatoeba (ast-Latn_eng-Latn)": 70.08, "Tatoeba (tha-Thai_eng-Latn)": 95.99, "Tatoeba (ara-Arab_eng-Latn)": 90.19, "Tatoeba (pes-Arab_eng-Latn)": 93.47, "Tatoeba (awa-Deva_eng-Latn)": 42.83, "Tatoeba (tur-Latn_eng-Latn)": 96.17, "Tatoeba (ceb-Latn_eng-Latn)": 7.39, "Tatoeba (swh-Latn_eng-Latn)": 16.02, "Tatoeba (srp-Cyrl_eng-Latn)": 94.12, "Tatoeba (est-Latn_eng-Latn)": 98.4, "Tatoeba (aze-Latn_eng-Latn)": 76.36, "Tatoeba (bul-Cyrl_eng-Latn)": 93.52, "Tatoeba (oci-Latn_eng-Latn)": 43.49, "Tatoeba (pol-Latn_eng-Latn)": 96.95, "Tatoeba (kab-Latn_eng-Latn)": 1.41, "Tatoeba (khm-Khmr_eng-Latn)": 58.8, "Tatoeba (uig-Arab_eng-Latn)": 48.35, "Tatoeba (mhr-Cyrl_eng-Latn)": 7.57 } ] }, "Classification": { "accuracy": [ { "Model": "paraphrase-multilingual-mpnet-base-v2", "AllegroReviews": 33.86, "AllegroReviews (pol-Latn)": 33.89, "AmazonCounterfactualClassification (de)": 69.95, "AmazonCounterfactualClassification (en)": 75.81, "AmazonCounterfactualClassification (en-ext)": 76.25, "AmazonCounterfactualClassification (ja)": 69.79, "AmazonCounterfactualClassification (deu-Latn)": 69.96, "AmazonCounterfactualClassification (jpn-Jpan)": 69.78, "AmazonPolarityClassification": 76.41, "AmazonReviewsClassification (de)": 39.52, "AmazonReviewsClassification (en)": 38.52, "AmazonReviewsClassification (es)": 39.99, "AmazonReviewsClassification (fr)": 39.0, "AmazonReviewsClassification (ja)": 36.64, "AmazonReviewsClassification (zh)": 37.74, "AmazonReviewsClassification (deu-Latn)": 39.53, "AmazonReviewsClassification (spa-Latn)": 39.97, "AmazonReviewsClassification (fra-Latn)": 38.98, "AmazonReviewsClassification (jpn-Jpan)": 36.65, "AmazonReviewsClassification (cmn-Hans)": 37.74, "AngryTweetsClassification (dan-Latn)": 54.84, "Banking77Classification": 81.1, "CBD": 65.0, "CBD (pol-Latn)": 64.97, "DanishPoliticalCommentsClassification (dan-Latn)": 40.96, "EmotionClassification": 45.85, "GeoreviewClassification (rus-Cyrl)": 42.33, "HeadlineClassification (rus-Cyrl)": 70.35, "IFlyTek (cmn-Hans)": 43.98, "ImdbClassification": 64.58, "InappropriatenessClassification (rus-Cyrl)": 59.32, "JDReview (cmn-Hans)": 70.34, "KinopoiskClassification (rus-Cyrl)": 44.31, "LccSentimentClassification (dan-Latn)": 58.4, "MTOPDomainClassification (de)": 85.73, "MTOPDomainClassification (en)": 89.24, "MTOPDomainClassification (es)": 86.96, "MTOPDomainClassification (fr)": 81.21, "MTOPDomainClassification (hi)": 84.76, "MTOPDomainClassification (th)": 82.51, "MTOPDomainClassification (deu-Latn)": 85.73, "MTOPDomainClassification (spa-Latn)": 86.98, "MTOPDomainClassification (fra-Latn)": 81.21, "MTOPDomainClassification (hin-Deva)": 84.76, "MTOPDomainClassification (tha-Thai)": 82.51, "MTOPIntentClassification (de)": 61.27, "MTOPIntentClassification (en)": 68.69, "MTOPIntentClassification (es)": 66.59, "MTOPIntentClassification (fr)": 59.76, "MTOPIntentClassification (hi)": 62.37, "MTOPIntentClassification (th)": 64.8, "MTOPIntentClassification (deu-Latn)": 61.26, "MTOPIntentClassification (spa-Latn)": 66.6, "MTOPIntentClassification (fra-Latn)": 59.75, "MTOPIntentClassification (hin-Deva)": 62.38, "MTOPIntentClassification (tha-Thai)": 64.77, "MasakhaNEWSClassification (fra)": 78.1, "MasakhaNEWSClassification (amh-Ethi)": 78.83, "MasakhaNEWSClassification (eng)": 75.39, "MasakhaNEWSClassification 
(fra-Latn)": 72.94, "MasakhaNEWSClassification (hau-Latn)": 54.49, "MasakhaNEWSClassification (ibo-Latn)": 46.79, "MasakhaNEWSClassification (lin-Latn)": 69.77, "MasakhaNEWSClassification (lug-Latn)": 43.05, "MasakhaNEWSClassification (orm-Ethi)": 41.97, "MasakhaNEWSClassification (pcm-Latn)": 90.2, "MasakhaNEWSClassification (run-Latn)": 49.97, "MasakhaNEWSClassification (sna-Latn)": 59.78, "MasakhaNEWSClassification (som-Latn)": 47.65, "MasakhaNEWSClassification (swa-Latn)": 60.42, "MasakhaNEWSClassification (tir-Ethi)": 45.04, "MasakhaNEWSClassification (xho-Latn)": 48.82, "MasakhaNEWSClassification (yor-Latn)": 58.3, "MassiveIntentClassification (pl)": 64.29, "MassiveIntentClassification (fr)": 61.88, "MassiveIntentClassification (mal-Mlym)": 54.34, "MassiveIntentClassification (tel-Telu)": 52.85, "MassiveIntentClassification (jpn-Jpan)": 63.76, "MassiveIntentClassification (nld-Latn)": 63.57, "MassiveIntentClassification (jav-Latn)": 36.49, "MassiveIntentClassification (heb-Hebr)": 58.25, "MassiveIntentClassification (tam-Taml)": 50.18, "MassiveIntentClassification (slv-Latn)": 63.5, "MassiveIntentClassification (tha-Thai)": 61.12, "MassiveIntentClassification (fra-Latn)": 64.8, "MassiveIntentClassification (ind-Latn)": 65.43, "MassiveIntentClassification (amh-Ethi)": 41.56, "MassiveIntentClassification (en)": 69.32, "MassiveIntentClassification (nob-Latn)": 62.62, "MassiveIntentClassification (kan-Knda)": 50.62, "MassiveIntentClassification (dan-Latn)": 62.8, "MassiveIntentClassification (ell-Grek)": 62.63, "MassiveIntentClassification (msa-Latn)": 60.72, "MassiveIntentClassification (ita-Latn)": 64.69, "MassiveIntentClassification (tur-Latn)": 64.58, "MassiveIntentClassification (ben-Beng)": 48.79, "MassiveIntentClassification (aze-Latn)": 56.98, "MassiveIntentClassification (tgl-Latn)": 38.83, "MassiveIntentClassification (mon-Cyrl)": 56.61, "MassiveIntentClassification (urd-Arab)": 56.36, "MassiveIntentClassification (vie-Latn)": 59.71, "MassiveIntentClassification (cmo-Hans)": 65.32, "MassiveIntentClassification (cym-Latn)": 27.89, "MassiveIntentClassification (rus-Cyrl)": 63.23, "MassiveIntentClassification (mya-Mymr)": 57.08, "MassiveIntentClassification (hun-Latn)": 63.85, "MassiveIntentClassification (hin-Deva)": 62.79, "MassiveIntentClassification (hye-Armn)": 57.76, "MassiveIntentClassification (kat-Geor)": 49.88, "MassiveIntentClassification (fin-Latn)": 62.26, "MassiveIntentClassification (ara-Arab)": 51.43, "MassiveIntentClassification (por-Latn)": 64.88, "MassiveIntentClassification (pol-Latn)": 64.32, "MassiveIntentClassification (isl-Latn)": 37.09, "MassiveIntentClassification (afr-Latn)": 52.35, "MassiveIntentClassification (fas-Arab)": 65.33, "MassiveIntentClassification (khm-Khmr)": 45.48, "MassiveIntentClassification (kor-Kore)": 61.84, "MassiveIntentClassification (spa-Latn)": 64.45, "MassiveIntentClassification (cmo-Hant)": 62.33, "MassiveIntentClassification (ron-Latn)": 62.83, "MassiveIntentClassification (sqi-Latn)": 62.48, "MassiveIntentClassification (swa-Latn)": 31.93, "MassiveIntentClassification (swe-Latn)": 64.71, "MassiveIntentClassification (deu-Latn)": 59.56, "MassiveIntentClassification (lav-Latn)": 61.29, "MassiveScenarioClassification (pl)": 68.98, "MassiveScenarioClassification (fr)": 67.9, "MassiveScenarioClassification (tam-Taml)": 55.97, "MassiveScenarioClassification (heb-Hebr)": 65.16, "MassiveScenarioClassification (ind-Latn)": 70.73, "MassiveScenarioClassification (afr-Latn)": 59.68, "MassiveScenarioClassification (fin-Latn)": 67.58, 
"MassiveScenarioClassification (vie-Latn)": 65.7, "MassiveScenarioClassification (mon-Cyrl)": 60.84, "MassiveScenarioClassification (sqi-Latn)": 69.62, "MassiveScenarioClassification (nob-Latn)": 70.23, "MassiveScenarioClassification (por-Latn)": 70.08, "MassiveScenarioClassification (aze-Latn)": 61.52, "MassiveScenarioClassification (nld-Latn)": 70.37, "MassiveScenarioClassification (spa-Latn)": 70.4, "MassiveScenarioClassification (mal-Mlym)": 60.14, "MassiveScenarioClassification (cmo-Hant)": 68.71, "MassiveScenarioClassification (fra-Latn)": 70.71, "MassiveScenarioClassification (ita-Latn)": 69.74, "MassiveScenarioClassification (hun-Latn)": 70.31, "MassiveScenarioClassification (urd-Arab)": 62.92, "MassiveScenarioClassification (cym-Latn)": 35.27, "MassiveScenarioClassification (khm-Khmr)": 53.13, "MassiveScenarioClassification (swa-Latn)": 37.26, "MassiveScenarioClassification (mya-Mymr)": 63.03, "MassiveScenarioClassification (isl-Latn)": 44.16, "MassiveScenarioClassification (tha-Thai)": 69.44, "MassiveScenarioClassification (kat-Geor)": 57.3, "MassiveScenarioClassification (pol-Latn)": 68.99, "MassiveScenarioClassification (ell-Grek)": 68.81, "MassiveScenarioClassification (cmo-Hans)": 71.25, "MassiveScenarioClassification (tgl-Latn)": 43.98, "MassiveScenarioClassification (lav-Latn)": 66.28, "MassiveScenarioClassification (jpn-Jpan)": 69.68, "MassiveScenarioClassification (deu-Latn)": 67.35, "MassiveScenarioClassification (ara-Arab)": 57.79, "MassiveScenarioClassification (en)": 75.35, "MassiveScenarioClassification (msa-Latn)": 65.85, "MassiveScenarioClassification (tel-Telu)": 58.79, "MassiveScenarioClassification (ben-Beng)": 54.52, "MassiveScenarioClassification (kan-Knda)": 56.08, "MassiveScenarioClassification (tur-Latn)": 70.41, "MassiveScenarioClassification (kor-Kore)": 68.51, "MassiveScenarioClassification (hye-Armn)": 63.03, "MassiveScenarioClassification (jav-Latn)": 44.22, "MassiveScenarioClassification (rus-Cyrl)": 69.92, "MassiveScenarioClassification (hin-Deva)": 67.94, "MassiveScenarioClassification (amh-Ethi)": 48.96, "MassiveScenarioClassification (dan-Latn)": 71.04, "MassiveScenarioClassification (fas-Arab)": 69.88, "MassiveScenarioClassification (slv-Latn)": 70.81, "MassiveScenarioClassification (swe-Latn)": 71.6, "MassiveScenarioClassification (ron-Latn)": 67.94, "MultilingualSentiment (cmn-Hans)": 66.49, "NoRecClassification (nob-Latn)": 50.32, "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 41.57, "OnlineShopping (cmn-Hans)": 87.75, "PAC": 63.76, "PAC (pol-Latn)": 63.76, "PolEmo2.0-IN": 62.78, "PolEmo2.0-IN (pol-Latn)": 62.74, "PolEmo2.0-OUT": 19.98, "PolEmo2.0-OUT (pol-Latn)": 19.92, "RuReviewsClassification (rus-Cyrl)": 62.33, "RuSciBenchGRNTIClassification (rus-Cyrl)": 56.01, "RuSciBenchOECDClassification (rus-Cyrl)": 44.14, "TNews (cmn-Hans)": 43.73, "ToxicConversationsClassification": 65.56, "TweetSentimentExtractionClassification": 59.04, "Waimai (cmn-Hans)": 83.97 } ] }, "Clustering": { "v_measure": [ { "Model": "paraphrase-multilingual-mpnet-base-v2", "8TagsClustering": 25.62, "AlloProfClusteringP2P": 54.49, "AlloProfClusteringS2S": 44.79, "ArxivClusteringP2P": 37.78, "ArxivClusteringS2S": 31.68, "BiorxivClusteringP2P": 33.02, "BiorxivClusteringS2S": 29.45, "BlurbsClusteringP2P": 34.38, "BlurbsClusteringS2S": 15.81, "GeoreviewClusteringP2P (rus-Cyrl)": 56.18, "HALClusteringS2S": 23.97, "MLSUMClusteringP2P": 40.55, "MLSUMClusteringP2P (rus-Cyrl)": 35.95, "MLSUMClusteringS2S": 37.53, "MLSUMClusteringS2S 
(rus-Cyrl)": 38.88, "MasakhaNEWSClusteringP2P (fra)": 41.57, "MasakhaNEWSClusteringP2P (amh-Ethi)": 46.85, "MasakhaNEWSClusteringP2P (eng)": 47.3, "MasakhaNEWSClusteringP2P (fra-Latn)": 53.3, "MasakhaNEWSClusteringP2P (hau-Latn)": 27.61, "MasakhaNEWSClusteringP2P (ibo-Latn)": 41.32, "MasakhaNEWSClusteringP2P (lin-Latn)": 58.37, "MasakhaNEWSClusteringP2P (lug-Latn)": 47.56, "MasakhaNEWSClusteringP2P (orm-Ethi)": 24.53, "MasakhaNEWSClusteringP2P (pcm-Latn)": 66.55, "MasakhaNEWSClusteringP2P (run-Latn)": 51.97, "MasakhaNEWSClusteringP2P (sna-Latn)": 45.55, "MasakhaNEWSClusteringP2P (som-Latn)": 33.98, "MasakhaNEWSClusteringP2P (swa-Latn)": 25.03, "MasakhaNEWSClusteringP2P (tir-Ethi)": 48.33, "MasakhaNEWSClusteringP2P (xho-Latn)": 29.47, "MasakhaNEWSClusteringP2P (yor-Latn)": 28.25, "MasakhaNEWSClusteringS2S (fra)": 30.88, "MasakhaNEWSClusteringS2S (amh-Ethi)": 51.54, "MasakhaNEWSClusteringS2S (eng)": 43.28, "MasakhaNEWSClusteringS2S (fra-Latn)": 37.92, "MasakhaNEWSClusteringS2S (hau-Latn)": 17.97, "MasakhaNEWSClusteringS2S (ibo-Latn)": 34.56, "MasakhaNEWSClusteringS2S (lin-Latn)": 57.43, "MasakhaNEWSClusteringS2S (lug-Latn)": 45.22, "MasakhaNEWSClusteringS2S (orm-Ethi)": 21.9, "MasakhaNEWSClusteringS2S (pcm-Latn)": 62.1, "MasakhaNEWSClusteringS2S (run-Latn)": 46.81, "MasakhaNEWSClusteringS2S (sna-Latn)": 43.15, "MasakhaNEWSClusteringS2S (som-Latn)": 29.44, "MasakhaNEWSClusteringS2S (swa-Latn)": 10.31, "MasakhaNEWSClusteringS2S (tir-Ethi)": 51.95, "MasakhaNEWSClusteringS2S (xho-Latn)": 21.26, "MasakhaNEWSClusteringS2S (yor-Latn)": 28.88, "MedrxivClusteringP2P": 31.93, "MedrxivClusteringS2S": 31.53, "RedditClustering": 45.65, "RedditClusteringP2P": 52.05, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 48.47, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 42.9, "StackExchangeClustering": 52.99, "StackExchangeClusteringP2P": 33.06, "TenKGnadClusteringP2P": 35.96, "TenKGnadClusteringS2S": 22.0, "TwentyNewsgroupsClustering": 44.36 } ] }, "PairClassification": { "ap": [ { "Model": "paraphrase-multilingual-mpnet-base-v2", "CDSC-E": 75.76, "CDSC-E (pol-Latn)": 75.77, "OpusparcusPC (fr)": 93.45, "OpusparcusPC (deu-Latn)": 97.34, "OpusparcusPC (en)": 98.59, "OpusparcusPC (fin-Latn)": 95.33, "OpusparcusPC (fra-Latn)": 93.45, "OpusparcusPC (rus-Cyrl)": 90.47, "OpusparcusPC (swe-Latn)": 95.16, "PPC": 93.67, "PSC": 98.26, "PSC (pol-Latn)": 98.26, "PawsXPairClassification (fr)": 58.14, "PawsXPairClassification (deu-Latn)": 55.69, "PawsXPairClassification (en)": 60.12, "PawsXPairClassification (spa-Latn)": 56.94, "PawsXPairClassification (fra-Latn)": 58.14, "PawsXPairClassification (jpn-Hira)": 49.37, "PawsXPairClassification (kor-Hang)": 50.66, "PawsXPairClassification (cmn-Hans)": 55.47, "SICK-E-PL": 77.22, "SICK-E-PL (pol-Latn)": 77.22, "SprintDuplicateQuestions": 90.55, "TERRa (rus-Cyrl)": 64.57, "TwitterSemEval2015": 66.75, "TwitterURLCorpus": 85.14 } ] }, "Reranking": { "map": [ { "Model": "paraphrase-multilingual-mpnet-base-v2", "AlloprofReranking": 54.34, "AlloprofReranking (fra-Latn)": 67.2, "AskUbuntuDupQuestions": 60.16, "MMarcoReranking (cmn-Hans)": 14.57, "MindSmallReranking": 30.15, "RuBQReranking (rus-Cyrl)": 58.77, "SciDocsRR": 78.09, "StackOverflowDupQuestions": 46.78, "SyntecReranking": 83.23, "SyntecReranking (fra-Latn)": 80.97, "T2Reranking (cmn-Hans)": 64.49 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "paraphrase-multilingual-mpnet-base-v2", "AILACasedocs": 17.45, "AILAStatutes": 22.24, "ARCChallenge": 7.19, "AlloprofRetrieval": 30.8, "AlloprofRetrieval (fra-Latn)": 30.8, "AlphaNLI": 21.87, 
"ArguAna": 48.91, "ArguAna-PL": 42.62, "ArguAna-PL (pol-Latn)": 42.61, "BSARDRetrieval": 0.0, "BSARDRetrieval (fra-Latn)": 13.19, "CQADupstackRetrieval": 31.32, "ClimateFEVER": 15.27, "CmedqaRetrieval (cmn-Hans)": 10.15, "CovidRetrieval (cmn-Hans)": 28.85, "DBPedia": 26.22, "DBPedia-PL": 20.18, "DuRetrieval (cmn-Hans)": 33.41, "EcomRetrieval (cmn-Hans)": 9.69, "FEVER": 56.76, "FiQA-PL": 14.68, "FiQA-PL (pol-Latn)": 14.71, "FiQA2018": 22.96, "GerDaLIRSmall (deu-Latn)": 3.0, "HellaSwag": 17.53, "HotpotQA": 37.03, "HotpotQA-PL": 29.36, "LEMBNarrativeQARetrieval": 16.02, "LEMBNeedleRetrieval": 14.0, "LEMBPasskeyRetrieval": 7.75, "LEMBQMSumRetrieval": 12.23, "LEMBSummScreenFDRetrieval": 41.15, "LEMBWikimQARetrieval": 38.86, "LeCaRDv2 (zho-Hans)": 33.91, "LegalBenchConsumerContractsQA": 52.37, "LegalBenchCorporateLobbying": 87.62, "LegalQuAD (deu-Latn)": 17.8, "LegalSummarization": 56.8, "MMarcoRetrieval (cmn-Hans)": 44.62, "MSMARCO": 26.6, "MSMARCO-PL": 12.45, "MedicalRetrieval (cmn-Hans)": 14.1, "MintakaRetrieval (fr)": 24.45, "MintakaRetrieval (ara-Arab)": 14.55, "MintakaRetrieval (deu-Latn)": 25.43, "MintakaRetrieval (spa-Latn)": 24.94, "MintakaRetrieval (fra-Latn)": 24.45, "MintakaRetrieval (hin-Deva)": 18.67, "MintakaRetrieval (ita-Latn)": 25.62, "MintakaRetrieval (jpn-Hira)": 15.46, "MintakaRetrieval (por-Latn)": 26.15, "NFCorpus": 25.49, "NFCorpus-PL": 18.53, "NFCorpus-PL (pol-Latn)": 18.54, "NQ": 33.6, "NQ-PL": 15.64, "PIQA": 18.65, "Quail": 2.98, "Quora-PL": 79.18, "QuoraRetrieval": 86.4, "RARbCode": 11.02, "RARbMath": 30.93, "RiaNewsRetrieval (rus-Cyrl)": 51.75, "RuBQRetrieval (rus-Cyrl)": 37.04, "SCIDOCS": 13.97, "SCIDOCS-PL": 11.18, "SCIDOCS-PL (pol-Latn)": 11.17, "SIQA": 1.21, "SciFact": 50.3, "SciFact-PL": 41.53, "SciFact-PL (pol-Latn)": 41.55, "SpartQA": 5.69, "SyntecRetrieval": 76.0, "SyntecRetrieval (fra-Latn)": 76.0, "T2Retrieval (cmn-Hans)": 28.35, "TRECCOVID": 37.87, "TRECCOVID-PL": 35.38, "TRECCOVID-PL (pol-Latn)": 35.43, "TempReasonL1": 1.94, "TempReasonL2Fact": 5.34, "TempReasonL2Pure": 0.33, "TempReasonL3Fact": 6.79, "TempReasonL3Pure": 3.19, "Touche2020": 17.4, "VideoRetrieval (cmn-Hans)": 14.18, "WinoGrande": 49.01, "XPQARetrieval (fr)": 46.22, "XPQARetrieval (ara-Arab_ara-Arab)": 24.86, "XPQARetrieval (eng-Latn_ara-Arab)": 19.6, "XPQARetrieval (ara-Arab_eng-Latn)": 28.21, "XPQARetrieval (deu-Latn_deu-Latn)": 48.81, "XPQARetrieval (eng-Latn_deu-Latn)": 31.93, "XPQARetrieval (deu-Latn_eng-Latn)": 53.26, "XPQARetrieval (spa-Latn_spa-Latn)": 41.08, "XPQARetrieval (eng-Latn_spa-Latn)": 30.05, "XPQARetrieval (spa-Latn_eng-Latn)": 43.4, "XPQARetrieval (fra-Latn_fra-Latn)": 46.22, "XPQARetrieval (eng-Latn_fra-Latn)": 29.55, "XPQARetrieval (fra-Latn_eng-Latn)": 47.3, "XPQARetrieval (hin-Deva_hin-Deva)": 50.74, "XPQARetrieval (eng-Latn_hin-Deva)": 24.97, "XPQARetrieval (hin-Deva_eng-Latn)": 49.24, "XPQARetrieval (ita-Latn_ita-Latn)": 52.87, "XPQARetrieval (eng-Latn_ita-Latn)": 33.44, "XPQARetrieval (ita-Latn_eng-Latn)": 51.49, "XPQARetrieval (jpn-Hira_jpn-Hira)": 53.17, "XPQARetrieval (eng-Latn_jpn-Hira)": 26.66, "XPQARetrieval (jpn-Hira_eng-Latn)": 49.86, "XPQARetrieval (kor-Hang_kor-Hang)": 24.9, "XPQARetrieval (eng-Latn_kor-Hang)": 24.5, "XPQARetrieval (kor-Hang_eng-Latn)": 24.57, "XPQARetrieval (pol-Latn_pol-Latn)": 29.36, "XPQARetrieval (eng-Latn_pol-Latn)": 20.48, "XPQARetrieval (pol-Latn_eng-Latn)": 29.31, "XPQARetrieval (por-Latn_por-Latn)": 34.26, "XPQARetrieval (eng-Latn_por-Latn)": 21.72, "XPQARetrieval (por-Latn_eng-Latn)": 37.62, "XPQARetrieval 
(tam-Taml_tam-Taml)": 19.8, "XPQARetrieval (eng-Latn_tam-Taml)": 13.93, "XPQARetrieval (tam-Taml_eng-Latn)": 18.26, "XPQARetrieval (cmn-Hans_cmn-Hans)": 42.54, "XPQARetrieval (eng-Latn_cmn-Hans)": 20.91, "XPQARetrieval (cmn-Hans_eng-Latn)": 42.81 } ] }, "STS": { "spearman": [ { "Model": "paraphrase-multilingual-mpnet-base-v2", "AFQMC (cmn-Hans)": 15.69, "ATEC (cmn-Hans)": 20.27, "BIOSSES": 76.27, "BQ (cmn-Hans)": 36.33, "CDSC-R": 88.8, "CDSC-R (pol-Latn)": 88.8, "LCQMC (cmn-Hans)": 63.3, "PAWSX (cmn-Hans)": 12.16, "RUParaPhraserSTS (rus-Cyrl)": 65.74, "RuSTSBenchmarkSTS (rus-Cyrl)": 82.46, "SICK-R": 79.62, "SICK-R-PL": 73.13, "SICK-R-PL (pol-Latn)": 73.13, "SICKFr": 75.56, "SICKFr (fra-Latn)": 75.56, "STS12": 77.9, "STS13": 85.11, "STS14": 80.81, "STS15": 87.48, "STS16": 83.2, "STS17 (ar-ar)": 79.1, "STS17 (en-ar)": 80.85, "STS17 (en-de)": 83.28, "STS17 (en-en)": 86.99, "STS17 (en-tr)": 74.9, "STS17 (es-en)": 86.11, "STS17 (es-es)": 85.14, "STS17 (fr-en)": 81.17, "STS17 (it-en)": 84.24, "STS17 (ko-ko)": 83.41, "STS17 (nl-en)": 82.51, "STS17 (eng-Latn_deu-Latn)": 83.28, "STS17 (eng-Latn_tur-Latn)": 74.9, "STS17 (eng-Latn_ara-Arab)": 80.85, "STS17 (ara-Arab)": 79.1, "STS17 (nld-Latn_eng-Latn)": 82.51, "STS17 (fra-Latn_eng-Latn)": 81.17, "STS17 (ita-Latn_eng-Latn)": 84.24, "STS17 (spa-Latn_eng-Latn)": 86.11, "STS17 (spa-Latn)": 85.14, "STS17 (kor-Hang)": 83.41, "STS22 (pl)": 33.64, "STS22 (fr)": 74.3, "STS22 (spa-Latn)": 59.91, "STS22 (en)": 63.52, "STS22 (spa-Latn_ita-Latn)": 53.7, "STS22 (pol-Latn)": 33.65, "STS22 (ara-Arab)": 52.19, "STS22 (deu-Latn)": 46.7, "STS22 (fra-Latn)": 74.3, "STS22 (deu-Latn_pol-Latn)": 40.53, "STS22 (tur-Latn)": 56.3, "STS22 (cmn-Hans_eng-Latn)": 67.96, "STS22 (pol-Latn_eng-Latn)": 73.07, "STS22 (rus-Cyrl)": 58.74, "STS22 (cmn-Hans)": 61.75, "STS22 (spa-Latn_eng-Latn)": 70.26, "STS22 (fra-Latn_pol-Latn)": 84.52, "STS22 (deu-Latn_eng-Latn)": 50.81, "STS22 (deu-Latn_fra-Latn)": 62.34, "STS22 (ita-Latn)": 60.65, "STSB (cmn-Hans)": 80.84, "STSBenchmark": 86.82, "STSBenchmarkMultilingualSTS (fr)": 84.69, "STSBenchmarkMultilingualSTS (nld-Latn)": 83.36, "STSBenchmarkMultilingualSTS (deu-Latn)": 83.56, "STSBenchmarkMultilingualSTS (fra-Latn)": 84.69, "STSBenchmarkMultilingualSTS (spa-Latn)": 84.61, "STSBenchmarkMultilingualSTS (cmn-Hans)": 81.98, "STSBenchmarkMultilingualSTS (en)": 86.82, "STSBenchmarkMultilingualSTS (rus-Cyrl)": 82.45, "STSBenchmarkMultilingualSTS (por-Latn)": 84.0, "STSBenchmarkMultilingualSTS (ita-Latn)": 84.09, "STSBenchmarkMultilingualSTS (pol-Latn)": 81.46 } ] }, "Summarization": { "spearman": [ { "Model": "paraphrase-multilingual-mpnet-base-v2", "SummEval": 31.57, "SummEvalFr": 29.47, "SummEvalFr (fra-Latn)": 29.47 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "paraphrase-multilingual-mpnet-base-v2" } ] } }, "DanskBERT": { "BitextMining": { "f1": [ { "Model": "DanskBERT", "BornholmBitextMining": 6.34 } ] }, "Classification": { "accuracy": [ { "Model": "DanskBERT", "AngryTweetsClassification": 54.28, "DKHateClassification": 59.3, "DanishPoliticalCommentsClassification": 39.81, "LccSentimentClassification": 58.0, "MassiveIntentClassification (da)": 54.68, "MassiveIntentClassification (nb)": 45.38, "MassiveIntentClassification (sv)": 40.82, "MassiveScenarioClassification (da)": 59.56, "MassiveScenarioClassification (nb)": 47.55, "MassiveScenarioClassification (sv)": 40.14, "NoRecClassification": 46.06, "NordicLangClassification": 74.25, "NorwegianParliament": 56.79, "ScalaDaClassification": 66.59, "ScalaNbClassification": 59.99 } ] }, 
"Clustering": { "v_measure": [ { "Model": "DanskBERT" } ] }, "PairClassification": { "ap": [ { "Model": "DanskBERT" } ] }, "Reranking": { "map": [ { "Model": "DanskBERT" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "DanskBERT" } ] }, "STS": { "spearman": [ { "Model": "DanskBERT" } ] }, "Summarization": { "spearman": [ { "Model": "DanskBERT" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "DanskBERT" } ] } }, "dfm-encoder-large-v1": { "BitextMining": { "f1": [ { "Model": "dfm-encoder-large-v1", "BornholmBitextMining": 11.65 } ] }, "Classification": { "accuracy": [ { "Model": "dfm-encoder-large-v1", "AngryTweetsClassification": 53.8, "DKHateClassification": 60.09, "DanishPoliticalCommentsClassification": 36.6, "LccSentimentClassification": 57.33, "MassiveIntentClassification (da)": 60.55, "MassiveIntentClassification (nb)": 52.49, "MassiveIntentClassification (sv)": 49.74, "MassiveScenarioClassification (da)": 64.16, "MassiveScenarioClassification (nb)": 54.59, "MassiveScenarioClassification (sv)": 50.1, "NoRecClassification": 48.3, "NordicLangClassification": 77.68, "NorwegianParliament": 58.78, "ScalaDaClassification": 63.08, "ScalaNbClassification": 58.95 } ] }, "Clustering": { "v_measure": [ { "Model": "dfm-encoder-large-v1" } ] }, "PairClassification": { "ap": [ { "Model": "dfm-encoder-large-v1" } ] }, "Reranking": { "map": [ { "Model": "dfm-encoder-large-v1" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "dfm-encoder-large-v1" } ] }, "STS": { "spearman": [ { "Model": "dfm-encoder-large-v1" } ] }, "Summarization": { "spearman": [ { "Model": "dfm-encoder-large-v1" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "dfm-encoder-large-v1" } ] } }, "instructor-xl": { "BitextMining": { "f1": [ { "Model": "instructor-xl" } ] }, "Classification": { "accuracy": [ { "Model": "instructor-xl" } ] }, "Clustering": { "v_measure": [ { "Model": "instructor-xl" } ] }, "PairClassification": { "ap": [ { "Model": "instructor-xl" } ] }, "Reranking": { "map": [ { "Model": "instructor-xl" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "instructor-xl", "BrightRetrieval (aops)": 8.26, "BrightRetrieval (robotics)": 17.39, "BrightRetrieval (economics)": 22.81, "BrightRetrieval (stackoverflow)": 19.06, "BrightRetrieval (leetcode)": 27.5, "BrightRetrieval (theoremqa_questions)": 14.59, "BrightRetrieval (psychology)": 27.43, "BrightRetrieval (biology)": 21.91, "BrightRetrieval (theoremqa_theorems)": 6.5, "BrightRetrieval (earth_science)": 34.35, "BrightRetrieval (sustainable_living)": 18.82, "BrightRetrieval (pony)": 5.02 } ] }, "STS": { "spearman": [ { "Model": "instructor-xl" } ] }, "Summarization": { "spearman": [ { "Model": "instructor-xl" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "instructor-xl", "Core17InstructionRetrieval": 0.69, "News21InstructionRetrieval": -0.9, "Robust04InstructionRetrieval": -8.08 } ] } }, "GritLM-7B": { "BitextMining": { "f1": [ { "Model": "GritLM-7B", "BornholmBitextMining (dan-Latn)": 45.13, "Tatoeba (csb-Latn_eng-Latn)": 50.13, "Tatoeba (ceb-Latn_eng-Latn)": 33.5, "Tatoeba (cmn-Hans_eng-Latn)": 94.08, "Tatoeba (uzb-Latn_eng-Latn)": 41.69, "Tatoeba (kur-Latn_eng-Latn)": 27.94, "Tatoeba (ita-Latn_eng-Latn)": 91.2, "Tatoeba (lvs-Latn_eng-Latn)": 53.54, "Tatoeba (yid-Hebr_eng-Latn)": 17.13, "Tatoeba (gle-Latn_eng-Latn)": 48.14, "Tatoeba (ast-Latn_eng-Latn)": 79.11, "Tatoeba (ang-Latn_eng-Latn)": 76.84, "Tatoeba (jav-Latn_eng-Latn)": 26.6, "Tatoeba (ina-Latn_eng-Latn)": 91.24, "Tatoeba (nob-Latn_eng-Latn)": 93.53, "Tatoeba (swe-Latn_eng-Latn)": 90.43, 
"Tatoeba (lfn-Latn_eng-Latn)": 62.23, "Tatoeba (fin-Latn_eng-Latn)": 85.76, "Tatoeba (fry-Latn_eng-Latn)": 61.16, "Tatoeba (gsw-Latn_eng-Latn)": 53.28, "Tatoeba (rus-Cyrl_eng-Latn)": 91.82, "Tatoeba (tat-Cyrl_eng-Latn)": 24.46, "Tatoeba (mal-Mlym_eng-Latn)": 33.79, "Tatoeba (hrv-Latn_eng-Latn)": 91.04, "Tatoeba (ind-Latn_eng-Latn)": 90.05, "Tatoeba (tam-Taml_eng-Latn)": 46.27, "Tatoeba (kaz-Cyrl_eng-Latn)": 36.27, "Tatoeba (uig-Arab_eng-Latn)": 22.6, "Tatoeba (slv-Latn_eng-Latn)": 82.71, "Tatoeba (pms-Latn_eng-Latn)": 50.41, "Tatoeba (lit-Latn_eng-Latn)": 56.36, "Tatoeba (cha-Latn_eng-Latn)": 34.69, "Tatoeba (est-Latn_eng-Latn)": 46.73, "Tatoeba (mhr-Cyrl_eng-Latn)": 10.8, "Tatoeba (dan-Latn_eng-Latn)": 92.01, "Tatoeba (pol-Latn_eng-Latn)": 95.6, "Tatoeba (nov-Latn_eng-Latn)": 64.85, "Tatoeba (swh-Latn_eng-Latn)": 46.09, "Tatoeba (tha-Thai_eng-Latn)": 81.25, "Tatoeba (arz-Arab_eng-Latn)": 52.97, "Tatoeba (epo-Latn_eng-Latn)": 76.87, "Tatoeba (deu-Latn_eng-Latn)": 98.02, "Tatoeba (hye-Armn_eng-Latn)": 35.94, "Tatoeba (afr-Latn_eng-Latn)": 79.17, "Tatoeba (gla-Latn_eng-Latn)": 40.8, "Tatoeba (isl-Latn_eng-Latn)": 74.94, "Tatoeba (awa-Deva_eng-Latn)": 44.31, "Tatoeba (ido-Latn_eng-Latn)": 65.69, "Tatoeba (kor-Hang_eng-Latn)": 87.43, "Tatoeba (amh-Ethi_eng-Latn)": 6.18, "Tatoeba (eus-Latn_eng-Latn)": 31.88, "Tatoeba (mkd-Cyrl_eng-Latn)": 73.82, "Tatoeba (tur-Latn_eng-Latn)": 86.62, "Tatoeba (pes-Arab_eng-Latn)": 78.98, "Tatoeba (heb-Hebr_eng-Latn)": 61.75, "Tatoeba (aze-Latn_eng-Latn)": 64.11, "Tatoeba (hun-Latn_eng-Latn)": 88.54, "Tatoeba (bul-Cyrl_eng-Latn)": 90.37, "Tatoeba (kab-Latn_eng-Latn)": 2.9, "Tatoeba (cat-Latn_eng-Latn)": 90.66, "Tatoeba (dsb-Latn_eng-Latn)": 51.72, "Tatoeba (kat-Geor_eng-Latn)": 38.42, "Tatoeba (urd-Arab_eng-Latn)": 68.02, "Tatoeba (wuu-Hans_eng-Latn)": 80.28, "Tatoeba (oci-Latn_eng-Latn)": 58.12, "Tatoeba (arq-Arab_eng-Latn)": 30.52, "Tatoeba (ron-Latn_eng-Latn)": 90.29, "Tatoeba (bos-Latn_eng-Latn)": 87.33, "Tatoeba (nds-Latn_eng-Latn)": 64.54, "Tatoeba (tgl-Latn_eng-Latn)": 83.24, "Tatoeba (glg-Latn_eng-Latn)": 86.69, "Tatoeba (ben-Beng_eng-Latn)": 61.32, "Tatoeba (khm-Khmr_eng-Latn)": 16.4, "Tatoeba (ukr-Cyrl_eng-Latn)": 90.19, "Tatoeba (max-Deva_eng-Latn)": 51.87, "Tatoeba (lat-Latn_eng-Latn)": 80.43, "Tatoeba (xho-Latn_eng-Latn)": 28.43, "Tatoeba (spa-Latn_eng-Latn)": 96.75, "Tatoeba (tzl-Latn_eng-Latn)": 42.85, "Tatoeba (ara-Arab_eng-Latn)": 76.77, "Tatoeba (vie-Latn_eng-Latn)": 91.32, "Tatoeba (ces-Latn_eng-Latn)": 92.02, "Tatoeba (jpn-Jpan_eng-Latn)": 91.9, "Tatoeba (bel-Cyrl_eng-Latn)": 76.21, "Tatoeba (mon-Cyrl_eng-Latn)": 27.38, "Tatoeba (nld-Latn_eng-Latn)": 94.96, "Tatoeba (war-Latn_eng-Latn)": 27.75, "Tatoeba (bre-Latn_eng-Latn)": 12.59, "Tatoeba (por-Latn_eng-Latn)": 93.41, "Tatoeba (ile-Latn_eng-Latn)": 76.72, "Tatoeba (mar-Deva_eng-Latn)": 51.54, "Tatoeba (fao-Latn_eng-Latn)": 62.03, "Tatoeba (slk-Latn_eng-Latn)": 84.96, "Tatoeba (tel-Telu_eng-Latn)": 24.26, "Tatoeba (cym-Latn_eng-Latn)": 50.03, "Tatoeba (srp-Cyrl_eng-Latn)": 88.45, "Tatoeba (swg-Latn_eng-Latn)": 52.09, "Tatoeba (hin-Deva_eng-Latn)": 84.19, "Tatoeba (yue-Hant_eng-Latn)": 79.5, "Tatoeba (fra-Latn_eng-Latn)": 92.47, "Tatoeba (cor-Latn_eng-Latn)": 6.97, "Tatoeba (hsb-Latn_eng-Latn)": 64.48, "Tatoeba (zsm-Latn_eng-Latn)": 90.06, "Tatoeba (ber-Tfng_eng-Latn)": 6.2, "Tatoeba (pam-Latn_eng-Latn)": 12.11, "Tatoeba (kzj-Latn_eng-Latn)": 9.61, "Tatoeba (dtp-Latn_eng-Latn)": 8.37, "Tatoeba (nno-Latn_eng-Latn)": 80.89, "Tatoeba (ell-Grek_eng-Latn)": 80.13, "Tatoeba (orv-Cyrl_eng-Latn)": 
45.88, "Tatoeba (sqi-Latn_eng-Latn)": 54.37, "Tatoeba (tuk-Latn_eng-Latn)": 30.47, "Tatoeba (cbk-Latn_eng-Latn)": 67.64 } ] }, "Classification": { "accuracy": [ { "Model": "GritLM-7B", "AllegroReviews (pol-Latn)": 37.32, "AmazonCounterfactualClassification (en-ext)": 70.34, "AmazonCounterfactualClassification (en)": 71.1, "AmazonCounterfactualClassification (deu-Latn)": 67.63, "AmazonCounterfactualClassification (jpn-Jpan)": 73.3, "AmazonPolarityClassification": 86.69, "AmazonReviewsClassification (en)": 45.51, "AmazonReviewsClassification (deu-Latn)": 43.77, "AmazonReviewsClassification (spa-Latn)": 43.0, "AmazonReviewsClassification (fra-Latn)": 44.15, "AmazonReviewsClassification (jpn-Jpan)": 41.49, "AmazonReviewsClassification (cmn-Hans)": 35.34, "AngryTweetsClassification (dan-Latn)": 54.68, "Banking77Classification": 79.36, "CBD (pol-Latn)": 70.98, "DanishPoliticalCommentsClassification (dan-Latn)": 37.69, "EmotionClassification": 48.79, "GeoreviewClassification (rus-Cyrl)": 45.72, "HeadlineClassification (rus-Cyrl)": 78.05, "IFlyTek (cmn-Hans)": 48.49, "ImdbClassification": 82.25, "InappropriatenessClassification (rus-Cyrl)": 60.11, "JDReview (cmn-Hans)": 84.02, "KinopoiskClassification (rus-Cyrl)": 56.14, "LccSentimentClassification (dan-Latn)": 57.2, "MTOPDomainClassification (en)": 92.67, "MTOPDomainClassification (deu-Latn)": 88.32, "MTOPDomainClassification (spa-Latn)": 88.45, "MTOPDomainClassification (fra-Latn)": 88.44, "MTOPDomainClassification (hin-Deva)": 86.89, "MTOPDomainClassification (tha-Thai)": 82.97, "MTOPIntentClassification (en)": 69.77, "MTOPIntentClassification (deu-Latn)": 69.53, "MTOPIntentClassification (spa-Latn)": 67.49, "MTOPIntentClassification (fra-Latn)": 65.93, "MTOPIntentClassification (hin-Deva)": 59.47, "MTOPIntentClassification (tha-Thai)": 65.14, "MasakhaNEWSClassification (amh-Ethi)": 53.06, "MasakhaNEWSClassification (eng)": 77.57, "MasakhaNEWSClassification (fra-Latn)": 77.39, "MasakhaNEWSClassification (hau-Latn)": 74.66, "MasakhaNEWSClassification (ibo-Latn)": 68.64, "MasakhaNEWSClassification (lin-Latn)": 74.23, "MasakhaNEWSClassification (lug-Latn)": 72.33, "MasakhaNEWSClassification (orm-Ethi)": 77.6, "MasakhaNEWSClassification (pcm-Latn)": 91.28, "MasakhaNEWSClassification (run-Latn)": 76.3, "MasakhaNEWSClassification (sna-Latn)": 85.99, "MasakhaNEWSClassification (som-Latn)": 63.71, "MasakhaNEWSClassification (swa-Latn)": 73.4, "MasakhaNEWSClassification (tir-Ethi)": 34.41, "MasakhaNEWSClassification (xho-Latn)": 83.27, "MasakhaNEWSClassification (yor-Latn)": 80.92, "MassiveIntentClassification (mya-Mymr)": 36.92, "MassiveIntentClassification (en)": 71.52, "MassiveIntentClassification (slv-Latn)": 63.08, "MassiveIntentClassification (sqi-Latn)": 50.98, "MassiveIntentClassification (kor-Kore)": 65.71, "MassiveIntentClassification (aze-Latn)": 56.24, "MassiveIntentClassification (isl-Latn)": 51.96, "MassiveIntentClassification (hin-Deva)": 61.18, "MassiveIntentClassification (dan-Latn)": 65.39, "MassiveIntentClassification (vie-Latn)": 62.05, "MassiveIntentClassification (heb-Hebr)": 57.71, "MassiveIntentClassification (tur-Latn)": 65.26, "MassiveIntentClassification (cmo-Hans)": 67.43, "MassiveIntentClassification (khm-Khmr)": 38.86, "MassiveIntentClassification (deu-Latn)": 67.75, "MassiveIntentClassification (fas-Arab)": 65.98, "MassiveIntentClassification (jav-Latn)": 50.25, "MassiveIntentClassification (nld-Latn)": 66.82, "MassiveIntentClassification (jpn-Jpan)": 68.56, "MassiveIntentClassification (ita-Latn)": 68.04, 
"MassiveIntentClassification (cym-Latn)": 48.59, "MassiveIntentClassification (pol-Latn)": 67.97, "MassiveIntentClassification (fin-Latn)": 60.55, "MassiveIntentClassification (tha-Thai)": 58.99, "MassiveIntentClassification (lav-Latn)": 51.12, "MassiveIntentClassification (mal-Mlym)": 43.57, "MassiveIntentClassification (hun-Latn)": 63.48, "MassiveIntentClassification (ind-Latn)": 65.58, "MassiveIntentClassification (por-Latn)": 67.76, "MassiveIntentClassification (tel-Telu)": 44.73, "MassiveIntentClassification (amh-Ethi)": 34.73, "MassiveIntentClassification (kan-Knda)": 44.51, "MassiveIntentClassification (spa-Latn)": 66.45, "MassiveIntentClassification (urd-Arab)": 54.11, "MassiveIntentClassification (kat-Geor)": 42.01, "MassiveIntentClassification (tam-Taml)": 43.48, "MassiveIntentClassification (afr-Latn)": 59.48, "MassiveIntentClassification (rus-Cyrl)": 69.41, "MassiveIntentClassification (tgl-Latn)": 61.83, "MassiveIntentClassification (ell-Grek)": 60.45, "MassiveIntentClassification (hye-Armn)": 43.12, "MassiveIntentClassification (ara-Arab)": 54.46, "MassiveIntentClassification (fra-Latn)": 67.69, "MassiveIntentClassification (mon-Cyrl)": 40.84, "MassiveIntentClassification (msa-Latn)": 62.61, "MassiveIntentClassification (nob-Latn)": 63.58, "MassiveIntentClassification (ben-Beng)": 52.6, "MassiveIntentClassification (cmo-Hant)": 62.06, "MassiveIntentClassification (ron-Latn)": 62.45, "MassiveIntentClassification (swe-Latn)": 67.73, "MassiveIntentClassification (swa-Latn)": 50.1, "MassiveScenarioClassification (cmo-Hant)": 67.7, "MassiveScenarioClassification (kat-Geor)": 49.31, "MassiveScenarioClassification (ind-Latn)": 72.36, "MassiveScenarioClassification (amh-Ethi)": 42.0, "MassiveScenarioClassification (ita-Latn)": 71.86, "MassiveScenarioClassification (tur-Latn)": 68.71, "MassiveScenarioClassification (tel-Telu)": 50.8, "MassiveScenarioClassification (ell-Grek)": 67.42, "MassiveScenarioClassification (deu-Latn)": 73.64, "MassiveScenarioClassification (sqi-Latn)": 57.5, "MassiveScenarioClassification (cym-Latn)": 57.36, "MassiveScenarioClassification (spa-Latn)": 71.12, "MassiveScenarioClassification (nld-Latn)": 72.47, "MassiveScenarioClassification (swa-Latn)": 58.93, "MassiveScenarioClassification (cmo-Hans)": 71.91, "MassiveScenarioClassification (fin-Latn)": 65.91, "MassiveScenarioClassification (por-Latn)": 70.99, "MassiveScenarioClassification (hun-Latn)": 69.68, "MassiveScenarioClassification (slv-Latn)": 70.25, "MassiveScenarioClassification (urd-Arab)": 62.48, "MassiveScenarioClassification (hye-Armn)": 49.32, "MassiveScenarioClassification (pol-Latn)": 71.86, "MassiveScenarioClassification (khm-Khmr)": 45.52, "MassiveScenarioClassification (kan-Knda)": 49.51, "MassiveScenarioClassification (hin-Deva)": 66.18, "MassiveScenarioClassification (heb-Hebr)": 63.3, "MassiveScenarioClassification (rus-Cyrl)": 73.87, "MassiveScenarioClassification (mal-Mlym)": 48.53, "MassiveScenarioClassification (afr-Latn)": 67.34, "MassiveScenarioClassification (vie-Latn)": 69.19, "MassiveScenarioClassification (fra-Latn)": 70.79, "MassiveScenarioClassification (ben-Beng)": 58.75, "MassiveScenarioClassification (lav-Latn)": 57.3, "MassiveScenarioClassification (tam-Taml)": 50.9, "MassiveScenarioClassification (en)": 73.87, "MassiveScenarioClassification (aze-Latn)": 61.74, "MassiveScenarioClassification (swe-Latn)": 73.24, "MassiveScenarioClassification (kor-Kore)": 70.76, "MassiveScenarioClassification (ron-Latn)": 68.54, "MassiveScenarioClassification (msa-Latn)": 69.72, 
"MassiveScenarioClassification (mya-Mymr)": 44.25, "MassiveScenarioClassification (fas-Arab)": 70.5, "MassiveScenarioClassification (tha-Thai)": 64.51, "MassiveScenarioClassification (jpn-Jpan)": 72.81, "MassiveScenarioClassification (nob-Latn)": 69.75, "MassiveScenarioClassification (tgl-Latn)": 69.0, "MassiveScenarioClassification (dan-Latn)": 71.51, "MassiveScenarioClassification (ara-Arab)": 61.51, "MassiveScenarioClassification (jav-Latn)": 58.24, "MassiveScenarioClassification (isl-Latn)": 61.61, "MassiveScenarioClassification (mon-Cyrl)": 46.6, "MultilingualSentiment (cmn-Hans)": 68.13, "NoRecClassification (nob-Latn)": 52.05, "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 63.6, "OnlineShopping (cmn-Hans)": 86.99, "PAC (pol-Latn)": 68.09, "PolEmo2.0-IN (pol-Latn)": 66.07, "PolEmo2.0-OUT (pol-Latn)": 32.94, "RuReviewsClassification (rus-Cyrl)": 61.42, "RuSciBenchGRNTIClassification (rus-Cyrl)": 58.93, "RuSciBenchOECDClassification (rus-Cyrl)": 45.83, "TNews (cmn-Hans)": 49.94, "ToxicConversationsClassification": 63.9, "TweetSentimentExtractionClassification": 57.14, "Waimai (cmn-Hans)": 84.92 } ] }, "Clustering": { "v_measure": [ { "Model": "GritLM-7B", "GeoreviewClusteringP2P (rus-Cyrl)": 74.06, "MasakhaNEWSClusteringP2P (amh-Ethi)": 45.2, "MasakhaNEWSClusteringP2P (eng)": 70.5, "MasakhaNEWSClusteringP2P (fra-Latn)": 73.54, "MasakhaNEWSClusteringP2P (hau-Latn)": 51.33, "MasakhaNEWSClusteringP2P (ibo-Latn)": 66.75, "MasakhaNEWSClusteringP2P (lin-Latn)": 59.57, "MasakhaNEWSClusteringP2P (lug-Latn)": 58.93, "MasakhaNEWSClusteringP2P (orm-Ethi)": 54.38, "MasakhaNEWSClusteringP2P (pcm-Latn)": 92.67, "MasakhaNEWSClusteringP2P (run-Latn)": 59.51, "MasakhaNEWSClusteringP2P (sna-Latn)": 68.86, "MasakhaNEWSClusteringP2P (som-Latn)": 41.42, "MasakhaNEWSClusteringP2P (swa-Latn)": 33.61, "MasakhaNEWSClusteringP2P (tir-Ethi)": 51.68, "MasakhaNEWSClusteringP2P (xho-Latn)": 46.65, "MasakhaNEWSClusteringP2P (yor-Latn)": 52.39, "MasakhaNEWSClusteringS2S (amh-Ethi)": 43.39, "MasakhaNEWSClusteringS2S (eng)": 65.85, "MasakhaNEWSClusteringS2S (fra-Latn)": 68.87, "MasakhaNEWSClusteringS2S (hau-Latn)": 33.02, "MasakhaNEWSClusteringS2S (ibo-Latn)": 64.55, "MasakhaNEWSClusteringS2S (lin-Latn)": 72.01, "MasakhaNEWSClusteringS2S (lug-Latn)": 47.42, "MasakhaNEWSClusteringS2S (orm-Ethi)": 32.59, "MasakhaNEWSClusteringS2S (pcm-Latn)": 97.82, "MasakhaNEWSClusteringS2S (run-Latn)": 59.41, "MasakhaNEWSClusteringS2S (sna-Latn)": 71.58, "MasakhaNEWSClusteringS2S (som-Latn)": 40.91, "MasakhaNEWSClusteringS2S (swa-Latn)": 33.54, "MasakhaNEWSClusteringS2S (tir-Ethi)": 45.32, "MasakhaNEWSClusteringS2S (xho-Latn)": 28.94, "MasakhaNEWSClusteringS2S (yor-Latn)": 63.26, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 60.01, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 51.66 } ] }, "PairClassification": { "ap": [ { "Model": "GritLM-7B", "CDSC-E (pol-Latn)": 72.65, "OpusparcusPC (deu-Latn)": 96.65, "OpusparcusPC (en)": 98.57, "OpusparcusPC (fin-Latn)": 90.41, "OpusparcusPC (fra-Latn)": 93.41, "OpusparcusPC (rus-Cyrl)": 88.63, "OpusparcusPC (swe-Latn)": 94.04, "PSC (pol-Latn)": 99.43, "PawsXPairClassification (deu-Latn)": 58.5, "PawsXPairClassification (en)": 63.78, "PawsXPairClassification (spa-Latn)": 59.15, "PawsXPairClassification (fra-Latn)": 61.89, "PawsXPairClassification (jpn-Hira)": 51.46, "PawsXPairClassification (kor-Hang)": 52.15, "PawsXPairClassification (cmn-Hans)": 57.66, "SICK-E-PL (pol-Latn)": 75.98, "SprintDuplicateQuestions": 93.06, "TERRa (rus-Cyrl)": 59.39, "TwitterSemEval2015": 
71.24, "TwitterURLCorpus": 84.54 } ] }, "Reranking": { "map": [ { "Model": "GritLM-7B", "AlloprofReranking (fra-Latn)": 77.95, "AskUbuntuDupQuestions": 61.11, "MMarcoReranking (cmn-Hans)": 21.7, "MindSmallReranking": 31.53, "RuBQReranking (rus-Cyrl)": 72.41, "SciDocsRR": 84.78, "StackOverflowDupQuestions": 50.95, "SyntecReranking (fra-Latn)": 83.32, "T2Reranking (cmn-Hans)": 65.63 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "GritLM-7B", "AILACasedocs": 35.31, "AILAStatutes": 41.8, "ARCChallenge": 26.68, "AlloprofRetrieval (fra-Latn)": 55.42, "AlphaNLI": 34.0, "ArguAna": 63.17, "ArguAna-PL (pol-Latn)": 48.89, "BSARDRetrieval (fra-Latn)": 26.63, "BrightRetrieval (pony)": 21.98, "BrightRetrieval (robotics)": 17.31, "BrightRetrieval (economics)": 19.0, "BrightRetrieval (theoremqa_questions)": 23.34, "BrightRetrieval (leetcode)": 29.85, "BrightRetrieval (earth_science)": 32.77, "BrightRetrieval (stackoverflow)": 11.62, "BrightRetrieval (sustainable_living)": 18.04, "BrightRetrieval (biology)": 25.04, "BrightRetrieval (psychology)": 19.92, "BrightRetrieval (theoremqa_theorems)": 17.41, "BrightRetrieval (aops)": 8.91, "CmedqaRetrieval (cmn-Hans)": 35.58, "CovidRetrieval (cmn-Hans)": 73.47, "DuRetrieval (cmn-Hans)": 88.18, "EcomRetrieval (cmn-Hans)": 54.33, "FiQA-PL (pol-Latn)": 38.04, "FiQA2018": 59.91, "GerDaLIRSmall (deu-Latn)": 20.61, "HellaSwag": 39.45, "LEMBNarrativeQARetrieval": 41.46, "LEMBNeedleRetrieval": 33.25, "LEMBPasskeyRetrieval": 38.25, "LEMBQMSumRetrieval": 30.32, "LEMBSummScreenFDRetrieval": 78.49, "LEMBWikimQARetrieval": 60.8, "LeCaRDv2 (zho-Hans)": 64.05, "LegalBenchConsumerContractsQA": 82.1, "LegalBenchCorporateLobbying": 95.0, "LegalQuAD (deu-Latn)": 44.18, "LegalSummarization": 70.64, "MMarcoRetrieval (cmn-Hans)": 76.54, "MedicalRetrieval (cmn-Hans)": 55.81, "MintakaRetrieval (ara-Arab)": 25.88, "MintakaRetrieval (deu-Latn)": 55.66, "MintakaRetrieval (spa-Latn)": 53.36, "MintakaRetrieval (fra-Latn)": 51.68, "MintakaRetrieval (hin-Deva)": 26.06, "MintakaRetrieval (ita-Latn)": 54.91, "MintakaRetrieval (jpn-Hira)": 34.1, "MintakaRetrieval (por-Latn)": 54.91, "NFCorpus": 40.86, "NFCorpus-PL (pol-Latn)": 32.88, "PIQA": 44.35, "Quail": 11.69, "RARbCode": 84.0, "RARbMath": 82.35, "RuBQRetrieval (rus-Cyrl)": 70.94, "SCIDOCS": 24.4, "SCIDOCS-PL (pol-Latn)": 18.39, "SIQA": 7.23, "SciFact": 79.13, "SciFact-PL (pol-Latn)": 73.22, "SpartQA": 9.29, "SyntecRetrieval (fra-Latn)": 89.48, "T2Retrieval (cmn-Hans)": 82.96, "TRECCOVID": 74.36, "TRECCOVID-PL (pol-Latn)": 58.01, "TempReasonL1": 7.15, "TempReasonL2Fact": 58.38, "TempReasonL2Pure": 11.22, "TempReasonL3Fact": 44.29, "TempReasonL3Pure": 14.15, "Touche2020": 27.81, "VideoRetrieval (cmn-Hans)": 53.85, "WinoGrande": 53.74, "XPQARetrieval (ara-Arab_ara-Arab)": 45.21, "XPQARetrieval (eng-Latn_ara-Arab)": 27.32, "XPQARetrieval (ara-Arab_eng-Latn)": 39.43, "XPQARetrieval (deu-Latn_deu-Latn)": 76.58, "XPQARetrieval (eng-Latn_deu-Latn)": 55.44, "XPQARetrieval (deu-Latn_eng-Latn)": 72.56, "XPQARetrieval (spa-Latn_spa-Latn)": 64.55, "XPQARetrieval (eng-Latn_spa-Latn)": 45.49, "XPQARetrieval (spa-Latn_eng-Latn)": 61.03, "XPQARetrieval (fra-Latn_fra-Latn)": 70.85, "XPQARetrieval (eng-Latn_fra-Latn)": 48.14, "XPQARetrieval (fra-Latn_eng-Latn)": 66.96, "XPQARetrieval (hin-Deva_hin-Deva)": 74.75, "XPQARetrieval (eng-Latn_hin-Deva)": 25.61, "XPQARetrieval (hin-Deva_eng-Latn)": 63.9, "XPQARetrieval (ita-Latn_ita-Latn)": 76.53, "XPQARetrieval (eng-Latn_ita-Latn)": 46.88, "XPQARetrieval (ita-Latn_eng-Latn)": 71.03, "XPQARetrieval 
(jpn-Hira_jpn-Hira)": 72.27, "XPQARetrieval (eng-Latn_jpn-Hira)": 41.94, "XPQARetrieval (jpn-Hira_eng-Latn)": 69.42, "XPQARetrieval (kor-Hang_kor-Hang)": 40.64, "XPQARetrieval (eng-Latn_kor-Hang)": 32.68, "XPQARetrieval (kor-Hang_eng-Latn)": 36.0, "XPQARetrieval (pol-Latn_pol-Latn)": 50.74, "XPQARetrieval (eng-Latn_pol-Latn)": 33.14, "XPQARetrieval (pol-Latn_eng-Latn)": 48.06, "XPQARetrieval (por-Latn_por-Latn)": 49.86, "XPQARetrieval (eng-Latn_por-Latn)": 33.01, "XPQARetrieval (por-Latn_eng-Latn)": 48.45, "XPQARetrieval (tam-Taml_tam-Taml)": 41.78, "XPQARetrieval (eng-Latn_tam-Taml)": 10.95, "XPQARetrieval (tam-Taml_eng-Latn)": 21.28, "XPQARetrieval (cmn-Hans_cmn-Hans)": 65.29, "XPQARetrieval (eng-Latn_cmn-Hans)": 35.86, "XPQARetrieval (cmn-Hans_eng-Latn)": 58.12 } ] }, "STS": { "spearman": [ { "Model": "GritLM-7B", "AFQMC (cmn-Hans)": 32.65, "ATEC (cmn-Hans)": 37.34, "BIOSSES": 85.01, "BQ (cmn-Hans)": 38.03, "CDSC-R (pol-Latn)": 92.23, "LCQMC (cmn-Hans)": 71.38, "PAWSX (cmn-Hans)": 16.4, "RUParaPhraserSTS (rus-Cyrl)": 71.08, "RuSTSBenchmarkSTS (rus-Cyrl)": 76.26, "SICK-R": 81.47, "SICK-R-PL (pol-Latn)": 72.78, "SICKFr (fra-Latn)": 76.91, "STS12": 65.84, "STS13": 78.37, "STS14": 77.52, "STS15": 85.43, "STS16": 79.94, "STS17 (ita-Latn_eng-Latn)": 88.42, "STS17 (fra-Latn_eng-Latn)": 87.9, "STS17 (kor-Hang)": 78.74, "STS17 (en-en)": 90.12, "STS17 (nld-Latn_eng-Latn)": 88.29, "STS17 (ara-Arab)": 79.28, "STS17 (eng-Latn_deu-Latn)": 88.92, "STS17 (spa-Latn)": 87.12, "STS17 (eng-Latn_tur-Latn)": 77.47, "STS17 (spa-Latn_eng-Latn)": 87.47, "STS17 (eng-Latn_ara-Arab)": 74.45, "STS22 (spa-Latn_eng-Latn)": 80.76, "STS22 (ara-Arab)": 55.45, "STS22 (pol-Latn_eng-Latn)": 77.77, "STS22 (deu-Latn_pol-Latn)": 55.09, "STS22 (en)": 68.59, "STS22 (rus-Cyrl)": 68.46, "STS22 (deu-Latn_eng-Latn)": 62.33, "STS22 (cmn-Hans)": 72.29, "STS22 (pol-Latn)": 48.07, "STS22 (fra-Latn)": 83.09, "STS22 (cmn-Hans_eng-Latn)": 72.73, "STS22 (deu-Latn_fra-Latn)": 62.14, "STS22 (spa-Latn_ita-Latn)": 77.63, "STS22 (fra-Latn_pol-Latn)": 84.52, "STS22 (ita-Latn)": 77.58, "STS22 (spa-Latn)": 72.24, "STS22 (deu-Latn)": 59.34, "STS22 (tur-Latn)": 70.83, "STSB (cmn-Hans)": 74.11, "STSBenchmark": 83.1, "STSBenchmarkMultilingualSTS (spa-Latn)": 79.51, "STSBenchmarkMultilingualSTS (ita-Latn)": 76.24, "STSBenchmarkMultilingualSTS (por-Latn)": 76.61, "STSBenchmarkMultilingualSTS (fra-Latn)": 77.48, "STSBenchmarkMultilingualSTS (deu-Latn)": 77.57, "STSBenchmarkMultilingualSTS (en)": 83.12, "STSBenchmarkMultilingualSTS (nld-Latn)": 74.83, "STSBenchmarkMultilingualSTS (pol-Latn)": 74.67, "STSBenchmarkMultilingualSTS (cmn-Hans)": 75.27, "STSBenchmarkMultilingualSTS (rus-Cyrl)": 76.19 } ] }, "Summarization": { "spearman": [ { "Model": "GritLM-7B", "SummEval": 30.26, "SummEvalFr (fra-Latn)": 29.97 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "GritLM-7B", "Core17InstructionRetrieval": 2.62, "News21InstructionRetrieval": -1.01, "Robust04InstructionRetrieval": -1.68 } ] } }, "use-cmlm-multilingual": { "BitextMining": { "f1": [ { "Model": "use-cmlm-multilingual" } ] }, "Classification": { "accuracy": [ { "Model": "use-cmlm-multilingual" } ] }, "Clustering": { "v_measure": [ { "Model": "use-cmlm-multilingual", "BlurbsClusteringP2P": 29.63, "BlurbsClusteringS2S": 15.24, "TenKGnadClusteringP2P": 37.1, "TenKGnadClusteringS2S": 25.64 } ] }, "PairClassification": { "ap": [ { "Model": "use-cmlm-multilingual" } ] }, "Reranking": { "map": [ { "Model": "use-cmlm-multilingual" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "use-cmlm-multilingual" } 
] }, "STS": { "spearman": [ { "Model": "use-cmlm-multilingual" } ] }, "Summarization": { "spearman": [ { "Model": "use-cmlm-multilingual" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "use-cmlm-multilingual" } ] } }, "LLM2Vec-Sheared-Llama-supervised": { "BitextMining": { "f1": [ { "Model": "LLM2Vec-Sheared-Llama-supervised" } ] }, "Classification": { "accuracy": [ { "Model": "LLM2Vec-Sheared-Llama-supervised", "AmazonCounterfactualClassification (en)": 77.42, "AmazonPolarityClassification": 82.05, "AmazonReviewsClassification (en)": 40.81, "Banking77Classification": 86.01, "EmotionClassification": 48.38, "ImdbClassification": 75.33, "MTOPDomainClassification (en)": 94.09, "MTOPIntentClassification (en)": 77.05, "MassiveIntentClassification (en)": 75.58, "MassiveScenarioClassification (en)": 79.16, "ToxicConversationsClassification": 69.92, "TweetSentimentExtractionClassification": 60.76 } ] }, "Clustering": { "v_measure": [ { "Model": "LLM2Vec-Sheared-Llama-supervised", "ArxivClusteringP2P": 43.47, "ArxivClusteringS2S": 39.85, "BiorxivClusteringP2P": 37.1, "BiorxivClusteringS2S": 34.28, "MedrxivClusteringP2P": 33.55, "MedrxivClusteringS2S": 31.11, "RedditClustering": 53.02, "RedditClusteringP2P": 60.47, "StackExchangeClustering": 63.04, "StackExchangeClusteringP2P": 34.01, "TwentyNewsgroupsClustering": 49.37 } ] }, "PairClassification": { "ap": [ { "Model": "LLM2Vec-Sheared-Llama-supervised", "SprintDuplicateQuestions": 96.25, "TwitterSemEval2015": 76.14, "TwitterURLCorpus": 86.23 } ] }, "Reranking": { "map": [ { "Model": "LLM2Vec-Sheared-Llama-supervised", "AskUbuntuDupQuestions": 60.71, "MindSmallReranking": 31.96, "SciDocsRR": 79.23, "StackOverflowDupQuestions": 49.61 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "LLM2Vec-Sheared-Llama-supervised", "ArguAna": 51.66, "CQADupstackRetrieval": 41.73, "ClimateFEVER": 33.49, "DBPedia": 43.58, "FEVER": 86.81, "FiQA2018": 41.0, "HotpotQA": 63.85, "MSMARCO": 38.32, "NFCorpus": 37.12, "NQ": 53.89, "QuoraRetrieval": 87.37, "SCIDOCS": 17.96, "SciFact": 72.08, "TRECCOVID": 80.41, "Touche2020": 22.31 } ] }, "STS": { "spearman": [ { "Model": "LLM2Vec-Sheared-Llama-supervised", "BIOSSES": 85.88, "SICK-R": 82.25, "STS12": 78.28, "STS13": 85.52, "STS14": 82.49, "STS15": 88.76, "STS16": 87.11, "STS17 (en-en)": 90.1, "STS22 (en)": 68.25, "STSBenchmark": 87.16 } ] }, "Summarization": { "spearman": [ { "Model": "LLM2Vec-Sheared-Llama-supervised", "SummEval": 30.01 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "LLM2Vec-Sheared-Llama-supervised" } ] } }, "text-search-curie-001": { "BitextMining": { "f1": [ { "Model": "text-search-curie-001" } ] }, "Classification": { "accuracy": [ { "Model": "text-search-curie-001" } ] }, "Clustering": { "v_measure": [ { "Model": "text-search-curie-001" } ] }, "PairClassification": { "ap": [ { "Model": "text-search-curie-001" } ] }, "Reranking": { "map": [ { "Model": "text-search-curie-001" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "text-search-curie-001", "ArguAna": 46.98, "ClimateFEVER": 19.4, "FEVER": 75.6, "FiQA2018": 45.21, "HotpotQA": 64.8, "NFCorpus": 38.01, "QuoraRetrieval": 67.7, "SCIDOCS": 17.74, "SciFact": 74.35, "TRECCOVID": 56.14, "Touche2020": 30.9 } ] }, "STS": { "spearman": [ { "Model": "text-search-curie-001" } ] }, "Summarization": { "spearman": [ { "Model": "text-search-curie-001" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "text-search-curie-001" } ] } }, "text-embedding-ada-002-instruct": { "BitextMining": { "f1": [ { "Model": "text-embedding-ada-002-instruct" } ] }, 
"Classification": { "accuracy": [ { "Model": "text-embedding-ada-002-instruct" } ] }, "Clustering": { "v_measure": [ { "Model": "text-embedding-ada-002-instruct" } ] }, "PairClassification": { "ap": [ { "Model": "text-embedding-ada-002-instruct" } ] }, "Reranking": { "map": [ { "Model": "text-embedding-ada-002-instruct" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "text-embedding-ada-002-instruct", "ARCChallenge": 11.85, "AlphaNLI": 10.62, "HellaSwag": 24.8, "PIQA": 23.87, "Quail": 5.79, "RARbCode": 82.36, "RARbMath": 67.26, "SIQA": 2.64, "SpartQA": 4.75, "TempReasonL1": 1.44, "TempReasonL2Fact": 19.38, "TempReasonL2Pure": 2.43, "TempReasonL3Fact": 17.58, "TempReasonL3Pure": 7.31, "WinoGrande": 11.36 } ] }, "STS": { "spearman": [ { "Model": "text-embedding-ada-002-instruct" } ] }, "Summarization": { "spearman": [ { "Model": "text-embedding-ada-002-instruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "text-embedding-ada-002-instruct" } ] } }, "voyage-large-2-instruct": { "BitextMining": { "f1": [ { "Model": "voyage-large-2-instruct" } ] }, "Classification": { "accuracy": [ { "Model": "voyage-large-2-instruct", "AmazonCounterfactualClassification (en)": 77.6, "AmazonPolarityClassification": 96.58, "AmazonReviewsClassification (en)": 50.77, "Banking77Classification": 86.96, "EmotionClassification": 59.81, "ImdbClassification": 96.13, "MTOPDomainClassification (en)": 98.86, "MTOPIntentClassification (en)": 86.97, "MassiveIntentClassification (en)": 81.08, "MassiveScenarioClassification (en)": 87.95, "ToxicConversationsClassification": 83.58, "TweetSentimentExtractionClassification": 71.55 } ] }, "Clustering": { "v_measure": [ { "Model": "voyage-large-2-instruct", "ArxivClusteringP2P": 51.81, "ArxivClusteringS2S": 44.73, "BiorxivClusteringP2P": 46.07, "BiorxivClusteringS2S": 40.64, "MedrxivClusteringP2P": 42.94, "MedrxivClusteringS2S": 41.44, "RedditClustering": 68.5, "RedditClusteringP2P": 64.86, "StackExchangeClustering": 74.16, "StackExchangeClusteringP2P": 45.1, "TwentyNewsgroupsClustering": 66.62 } ] }, "PairClassification": { "ap": [ { "Model": "voyage-large-2-instruct", "SprintDuplicateQuestions": 94.5, "TwitterSemEval2015": 86.32, "TwitterURLCorpus": 86.9 } ] }, "Reranking": { "map": [ { "Model": "voyage-large-2-instruct", "AskUbuntuDupQuestions": 64.92, "MindSmallReranking": 30.97, "SciDocsRR": 89.34, "StackOverflowDupQuestions": 55.11 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "voyage-large-2-instruct", "ArguAna": 64.06, "BrightRetrieval (theoremqa_questions)": 26.06, "BrightRetrieval (earth_science)": 25.09, "BrightRetrieval (leetcode)": 30.6, "BrightRetrieval (economics)": 19.85, "BrightRetrieval (robotics)": 11.21, "BrightRetrieval (psychology)": 24.79, "BrightRetrieval (aops)": 7.45, "BrightRetrieval (sustainable_living)": 15.58, "BrightRetrieval (pony)": 1.48, "BrightRetrieval (theoremqa_theorems)": 10.13, "BrightRetrieval (biology)": 23.55, "BrightRetrieval (stackoverflow)": 15.03, "CQADupstackRetrieval": 46.6, "ClimateFEVER": 32.65, "DBPedia": 46.03, "FEVER": 91.47, "FiQA2018": 59.76, "HotpotQA": 70.86, "MSMARCO": 40.6, "NFCorpus": 40.32, "NQ": 65.92, "QuoraRetrieval": 87.4, "SCIDOCS": 24.32, "SciFact": 79.99, "TRECCOVID": 85.07, "Touche2020": 39.16 } ] }, "STS": { "spearman": [ { "Model": "voyage-large-2-instruct", "BIOSSES": 89.24, "SICK-R": 83.16, "STS12": 73.34, "STS13": 88.49, "STS14": 86.49, "STS15": 91.13, "STS16": 85.68, "STS17 (en-en)": 90.06, "STS22 (en)": 66.32, "STSBenchmark": 89.22 } ] }, "Summarization": { "spearman": [ { "Model": 
"voyage-large-2-instruct", "SummEval": 30.84 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "voyage-large-2-instruct" } ] } }, "LaBSE-ru-turbo": { "BitextMining": { "f1": [ { "Model": "LaBSE-ru-turbo", "Tatoeba (rus-Cyrl_eng-Latn)": 93.22 } ] }, "Classification": { "accuracy": [ { "Model": "LaBSE-ru-turbo", "GeoreviewClassification (rus-Cyrl)": 46.04, "HeadlineClassification (rus-Cyrl)": 69.98, "InappropriatenessClassification (rus-Cyrl)": 61.39, "KinopoiskClassification (rus-Cyrl)": 53.59, "MassiveIntentClassification (rus-Cyrl)": 66.08, "MassiveScenarioClassification (rus-Cyrl)": 71.13, "RuReviewsClassification (rus-Cyrl)": 64.58, "RuSciBenchGRNTIClassification (rus-Cyrl)": 56.67, "RuSciBenchOECDClassification (rus-Cyrl)": 43.58 } ] }, "Clustering": { "v_measure": [ { "Model": "LaBSE-ru-turbo", "GeoreviewClusteringP2P (rus-Cyrl)": 64.55, "MLSUMClusteringP2P (rus-Cyrl)": 45.7, "MLSUMClusteringS2S (rus-Cyrl)": 42.93, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 50.64, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 44.48 } ] }, "PairClassification": { "ap": [ { "Model": "LaBSE-ru-turbo", "OpusparcusPC (rus-Cyrl)": 89.32, "TERRa (rus-Cyrl)": 57.81 } ] }, "Reranking": { "map": [ { "Model": "LaBSE-ru-turbo", "RuBQReranking (rus-Cyrl)": 68.65 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "LaBSE-ru-turbo", "RiaNewsRetrieval (rus-Cyrl)": 69.36, "RuBQRetrieval (rus-Cyrl)": 65.71 } ] }, "STS": { "spearman": [ { "Model": "LaBSE-ru-turbo", "RUParaPhraserSTS (rus-Cyrl)": 72.97, "RuSTSBenchmarkSTS (rus-Cyrl)": 81.77, "STS22 (rus-Cyrl)": 62.89, "STSBenchmarkMultilingualSTS (rus-Cyrl)": 81.81 } ] }, "Summarization": { "spearman": [ { "Model": "LaBSE-ru-turbo" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "LaBSE-ru-turbo" } ] } }, "tart-full-flan-t5-xl": { "BitextMining": { "f1": [ { "Model": "tart-full-flan-t5-xl" } ] }, "Classification": { "accuracy": [ { "Model": "tart-full-flan-t5-xl" } ] }, "Clustering": { "v_measure": [ { "Model": "tart-full-flan-t5-xl" } ] }, "PairClassification": { "ap": [ { "Model": "tart-full-flan-t5-xl" } ] }, "Reranking": { "map": [ { "Model": "tart-full-flan-t5-xl" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "tart-full-flan-t5-xl" } ] }, "STS": { "spearman": [ { "Model": "tart-full-flan-t5-xl" } ] }, "Summarization": { "spearman": [ { "Model": "tart-full-flan-t5-xl" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "tart-full-flan-t5-xl", "Core17InstructionRetrieval": 2.82, "News21InstructionRetrieval": 1.99, "Robust04InstructionRetrieval": -0.72 } ] } }, "sentence-t5-xxl": { "BitextMining": { "f1": [ { "Model": "sentence-t5-xxl" } ] }, "Classification": { "accuracy": [ { "Model": "sentence-t5-xxl", "AmazonCounterfactualClassification (en)": 77.07, "AmazonPolarityClassification": 92.79, "AmazonReviewsClassification (en)": 48.93, "AmazonReviewsClassification (fr)": 46.09, "Banking77Classification": 82.31, "EmotionClassification": 48.57, "ImdbClassification": 90.23, "MTOPDomainClassification (en)": 92.49, "MTOPDomainClassification (fr)": 86.2, "MTOPIntentClassification (en)": 68.33, "MTOPIntentClassification (fr)": 58.33, "MasakhaNEWSClassification (fra)": 79.1, "MassiveIntentClassification (en)": 73.44, "MassiveIntentClassification (fr)": 65.91, "MassiveScenarioClassification (en)": 74.82, "MassiveScenarioClassification (fr)": 68.53, "ToxicConversationsClassification": 70.04, "TweetSentimentExtractionClassification": 62.01 } ] }, "Clustering": { "v_measure": [ { "Model": "sentence-t5-xxl", "AlloProfClusteringP2P": 60.98, "AlloProfClusteringS2S": 43.5, 
"ArxivClusteringP2P": 42.89, "ArxivClusteringS2S": 33.47, "BiorxivClusteringP2P": 36.53, "BiorxivClusteringS2S": 28.66, "BlurbsClusteringP2P": 39.91, "BlurbsClusteringS2S": 15.94, "HALClusteringS2S": 21.4, "MLSUMClusteringP2P": 42.24, "MLSUMClusteringS2S": 35.25, "MasakhaNEWSClusteringP2P (fra)": 61.15, "MasakhaNEWSClusteringS2S (fra)": 38.24, "MedrxivClusteringP2P": 32.09, "MedrxivClusteringS2S": 26.82, "RedditClustering": 58.99, "RedditClusteringP2P": 64.46, "StackExchangeClustering": 70.78, "StackExchangeClusteringP2P": 35.25, "TenKGnadClusteringP2P": 43.43, "TenKGnadClusteringS2S": 19.69, "TwentyNewsgroupsClustering": 50.93 } ] }, "PairClassification": { "ap": [ { "Model": "sentence-t5-xxl", "OpusparcusPC (fr)": 93.94, "PawsXPairClassification (fr)": 63.98, "SprintDuplicateQuestions": 88.89, "TwitterSemEval2015": 80.28, "TwitterURLCorpus": 86.01 } ] }, "Reranking": { "map": [ { "Model": "sentence-t5-xxl", "AlloprofReranking": 68.36, "AskUbuntuDupQuestions": 66.16, "MindSmallReranking": 30.6, "SciDocsRR": 76.09, "StackOverflowDupQuestions": 52.85, "SyntecReranking": 85.15 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "sentence-t5-xxl", "AlloprofRetrieval": 45.75, "ArguAna": 39.85, "BSARDRetrieval": 3.33, "CQADupstackRetrieval": 44.65, "ClimateFEVER": 14.63, "DBPedia": 39.19, "FEVER": 51.2, "FiQA2018": 46.68, "HotpotQA": 42.14, "MSMARCO": 27.67, "MintakaRetrieval (fr)": 34.93, "NFCorpus": 35.08, "NQ": 52.87, "QuoraRetrieval": 85.96, "SCIDOCS": 17.17, "SciFact": 55.38, "SyntecRetrieval": 78.97, "TRECCOVID": 59.48, "Touche2020": 21.65, "XPQARetrieval (fr)": 56.2 } ] }, "STS": { "spearman": [ { "Model": "sentence-t5-xxl", "BIOSSES": 80.43, "SICK-R": 80.47, "SICKFr": 77.07, "STS12": 78.85, "STS13": 88.94, "STS14": 84.86, "STS15": 89.32, "STS16": 84.67, "STS17 (en-en)": 89.46, "STS22 (en)": 65.33, "STS22 (fr)": 76.8, "STSBenchmark": 84.01, "STSBenchmarkMultilingualSTS (fr)": 81.24 } ] }, "Summarization": { "spearman": [ { "Model": "sentence-t5-xxl", "SummEval": 30.08, "SummEvalFr": 30.39 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "sentence-t5-xxl" } ] } }, "LaBSE": { "BitextMining": { "f1": [ { "Model": "LaBSE", "BUCC (de-en)": 99.35, "BUCC (fr-en)": 98.72, "BUCC (ru-en)": 97.78, "BUCC (zh-en)": 99.16, "BornholmBitextMining (dan-Latn)": 45.63, "Tatoeba (ber-Tfng_eng-Latn)": 8.4, "Tatoeba (kab-Latn_eng-Latn)": 4.31, "Tatoeba (tur-Latn_eng-Latn)": 98.0, "Tatoeba (gle-Latn_eng-Latn)": 93.8, "Tatoeba (awa-Deva_eng-Latn)": 71.7, "Tatoeba (yue-Hant_eng-Latn)": 89.58, "Tatoeba (tzl-Latn_eng-Latn)": 58.88, "Tatoeba (tat-Cyrl_eng-Latn)": 85.92, "Tatoeba (fin-Latn_eng-Latn)": 96.37, "Tatoeba (cor-Latn_eng-Latn)": 10.11, "Tatoeba (hye-Armn_eng-Latn)": 94.09, "Tatoeba (ben-Beng_eng-Latn)": 88.55, "Tatoeba (epo-Latn_eng-Latn)": 98.2, "Tatoeba (ile-Latn_eng-Latn)": 85.58, "Tatoeba (nld-Latn_eng-Latn)": 96.07, "Tatoeba (mar-Deva_eng-Latn)": 92.65, "Tatoeba (cmn-Hans_eng-Latn)": 95.1, "Tatoeba (hin-Deva_eng-Latn)": 96.87, "Tatoeba (tgl-Latn_eng-Latn)": 96.02, "Tatoeba (mon-Cyrl_eng-Latn)": 95.91, "Tatoeba (oci-Latn_eng-Latn)": 65.81, "Tatoeba (dan-Latn_eng-Latn)": 95.71, "Tatoeba (mkd-Cyrl_eng-Latn)": 93.6, "Tatoeba (ces-Latn_eng-Latn)": 96.68, "Tatoeba (fra-Latn_eng-Latn)": 94.86, "Tatoeba (yid-Hebr_eng-Latn)": 88.79, "Tatoeba (est-Latn_eng-Latn)": 96.55, "Tatoeba (ast-Latn_eng-Latn)": 90.68, "Tatoeba (ind-Latn_eng-Latn)": 93.66, "Tatoeba (bre-Latn_eng-Latn)": 15.07, "Tatoeba (eus-Latn_eng-Latn)": 95.01, "Tatoeba (heb-Hebr_eng-Latn)": 91.53, "Tatoeba (rus-Cyrl_eng-Latn)": 93.75, "Tatoeba 
(lfn-Latn_eng-Latn)": 67.54, "Tatoeba (jav-Latn_eng-Latn)": 79.77, "Tatoeba (ukr-Cyrl_eng-Latn)": 93.97, "Tatoeba (ell-Grek_eng-Latn)": 95.35, "Tatoeba (nds-Latn_eng-Latn)": 79.42, "Tatoeba (arz-Arab_eng-Latn)": 76.0, "Tatoeba (gla-Latn_eng-Latn)": 85.66, "Tatoeba (cbk-Latn_eng-Latn)": 79.44, "Tatoeba (max-Deva_eng-Latn)": 63.26, "Tatoeba (ron-Latn_eng-Latn)": 96.92, "Tatoeba (ido-Latn_eng-Latn)": 89.42, "Tatoeba (lvs-Latn_eng-Latn)": 95.88, "Tatoeba (khm-Khmr_eng-Latn)": 78.37, "Tatoeba (urd-Arab_eng-Latn)": 93.22, "Tatoeba (glg-Latn_eng-Latn)": 96.82, "Tatoeba (gsw-Latn_eng-Latn)": 46.5, "Tatoeba (swe-Latn_eng-Latn)": 95.63, "Tatoeba (swh-Latn_eng-Latn)": 84.5, "Tatoeba (tha-Thai_eng-Latn)": 96.14, "Tatoeba (tam-Taml_eng-Latn)": 89.0, "Tatoeba (uzb-Latn_eng-Latn)": 84.23, "Tatoeba (bul-Cyrl_eng-Latn)": 94.58, "Tatoeba (kur-Latn_eng-Latn)": 83.59, "Tatoeba (ina-Latn_eng-Latn)": 95.37, "Tatoeba (nov-Latn_eng-Latn)": 74.38, "Tatoeba (afr-Latn_eng-Latn)": 96.18, "Tatoeba (csb-Latn_eng-Latn)": 52.57, "Tatoeba (war-Latn_eng-Latn)": 60.29, "Tatoeba (cha-Latn_eng-Latn)": 31.77, "Tatoeba (pes-Arab_eng-Latn)": 94.7, "Tatoeba (kat-Geor_eng-Latn)": 95.02, "Tatoeba (bos-Latn_eng-Latn)": 94.92, "Tatoeba (kor-Hang_eng-Latn)": 90.95, "Tatoeba (slk-Latn_eng-Latn)": 96.5, "Tatoeba (fry-Latn_eng-Latn)": 89.31, "Tatoeba (ara-Arab_eng-Latn)": 88.8, "Tatoeba (sqi-Latn_eng-Latn)": 96.76, "Tatoeba (ita-Latn_eng-Latn)": 92.72, "Tatoeba (lat-Latn_eng-Latn)": 80.07, "Tatoeba (hsb-Latn_eng-Latn)": 67.11, "Tatoeba (swg-Latn_eng-Latn)": 59.36, "Tatoeba (srp-Cyrl_eng-Latn)": 94.43, "Tatoeba (isl-Latn_eng-Latn)": 94.75, "Tatoeba (hrv-Latn_eng-Latn)": 96.95, "Tatoeba (wuu-Hans_eng-Latn)": 90.18, "Tatoeba (mhr-Cyrl_eng-Latn)": 15.74, "Tatoeba (vie-Latn_eng-Latn)": 97.2, "Tatoeba (cym-Latn_eng-Latn)": 92.0, "Tatoeba (dsb-Latn_eng-Latn)": 64.81, "Tatoeba (hun-Latn_eng-Latn)": 96.55, "Tatoeba (slv-Latn_eng-Latn)": 96.03, "Tatoeba (orv-Cyrl_eng-Latn)": 38.93, "Tatoeba (cat-Latn_eng-Latn)": 95.38, "Tatoeba (dtp-Latn_eng-Latn)": 10.85, "Tatoeba (por-Latn_eng-Latn)": 94.14, "Tatoeba (jpn-Jpan_eng-Latn)": 95.38, "Tatoeba (ang-Latn_eng-Latn)": 59.28, "Tatoeba (aze-Latn_eng-Latn)": 94.93, "Tatoeba (kzj-Latn_eng-Latn)": 11.33, "Tatoeba (deu-Latn_eng-Latn)": 99.2, "Tatoeba (uig-Arab_eng-Latn)": 92.4, "Tatoeba (tel-Telu_eng-Latn)": 97.86, "Tatoeba (tuk-Latn_eng-Latn)": 75.27, "Tatoeba (nob-Latn_eng-Latn)": 98.4, "Tatoeba (nno-Latn_eng-Latn)": 94.48, "Tatoeba (spa-Latn_eng-Latn)": 98.4, "Tatoeba (mal-Mlym_eng-Latn)": 98.45, "Tatoeba (pam-Latn_eng-Latn)": 10.73, "Tatoeba (xho-Latn_eng-Latn)": 91.55, "Tatoeba (arq-Arab_eng-Latn)": 42.69, "Tatoeba (kaz-Cyrl_eng-Latn)": 87.49, "Tatoeba (bel-Cyrl_eng-Latn)": 95.0, "Tatoeba (pol-Latn_eng-Latn)": 97.22, "Tatoeba (fao-Latn_eng-Latn)": 87.4, "Tatoeba (zsm-Latn_eng-Latn)": 95.62, "Tatoeba (lit-Latn_eng-Latn)": 96.47, "Tatoeba (ceb-Latn_eng-Latn)": 64.42, "Tatoeba (pms-Latn_eng-Latn)": 64.57, "Tatoeba (amh-Ethi_eng-Latn)": 91.47, "Tatoeba (afr-eng)": 96.18, "Tatoeba (amh-eng)": 91.47, "Tatoeba (ang-eng)": 59.28, "Tatoeba (ara-eng)": 88.8, "Tatoeba (arq-eng)": 42.69, "Tatoeba (arz-eng)": 76.0, "Tatoeba (ast-eng)": 90.68, "Tatoeba (awa-eng)": 71.7, "Tatoeba (aze-eng)": 94.93, "Tatoeba (bel-eng)": 95.0, "Tatoeba (ben-eng)": 88.55, "Tatoeba (ber-eng)": 8.4, "Tatoeba (bos-eng)": 94.92, "Tatoeba (bre-eng)": 15.07, "Tatoeba (bul-eng)": 94.58, "Tatoeba (cat-eng)": 95.38, "Tatoeba (cbk-eng)": 79.44, "Tatoeba (ceb-eng)": 64.42, "Tatoeba (ces-eng)": 96.68, "Tatoeba (cha-eng)": 31.77, "Tatoeba (cmn-eng)": 
95.1, "Tatoeba (cor-eng)": 10.11, "Tatoeba (csb-eng)": 52.57, "Tatoeba (cym-eng)": 92.0, "Tatoeba (dan-eng)": 95.71, "Tatoeba (deu-eng)": 99.2, "Tatoeba (dsb-eng)": 64.81, "Tatoeba (dtp-eng)": 10.85, "Tatoeba (ell-eng)": 95.35, "Tatoeba (epo-eng)": 98.2, "Tatoeba (est-eng)": 96.55, "Tatoeba (eus-eng)": 95.01, "Tatoeba (fao-eng)": 87.4, "Tatoeba (fin-eng)": 96.37, "Tatoeba (fra-eng)": 94.86, "Tatoeba (fry-eng)": 89.31, "Tatoeba (gla-eng)": 85.66, "Tatoeba (gle-eng)": 93.8, "Tatoeba (glg-eng)": 96.82, "Tatoeba (gsw-eng)": 46.5, "Tatoeba (heb-eng)": 91.53, "Tatoeba (hin-eng)": 96.87, "Tatoeba (hrv-eng)": 96.95, "Tatoeba (hsb-eng)": 67.11, "Tatoeba (hun-eng)": 96.55, "Tatoeba (hye-eng)": 94.09, "Tatoeba (ido-eng)": 89.42, "Tatoeba (ile-eng)": 85.58, "Tatoeba (ina-eng)": 95.37, "Tatoeba (ind-eng)": 93.66, "Tatoeba (isl-eng)": 94.75, "Tatoeba (ita-eng)": 92.72, "Tatoeba (jav-eng)": 79.77, "Tatoeba (jpn-eng)": 95.38, "Tatoeba (kab-eng)": 4.31, "Tatoeba (kat-eng)": 95.02, "Tatoeba (kaz-eng)": 87.49, "Tatoeba (khm-eng)": 78.37, "Tatoeba (kor-eng)": 90.95, "Tatoeba (kur-eng)": 83.59, "Tatoeba (kzj-eng)": 11.33, "Tatoeba (lat-eng)": 80.07, "Tatoeba (lfn-eng)": 67.54, "Tatoeba (lit-eng)": 96.47, "Tatoeba (lvs-eng)": 95.88, "Tatoeba (mal-eng)": 98.45, "Tatoeba (mar-eng)": 92.65, "Tatoeba (max-eng)": 63.26, "Tatoeba (mhr-eng)": 15.74, "Tatoeba (mkd-eng)": 93.6, "Tatoeba (mon-eng)": 95.91, "Tatoeba (nds-eng)": 79.42, "Tatoeba (nld-eng)": 96.07, "Tatoeba (nno-eng)": 94.48, "Tatoeba (nob-eng)": 98.4, "Tatoeba (nov-eng)": 74.38, "Tatoeba (oci-eng)": 65.81, "Tatoeba (orv-eng)": 38.93, "Tatoeba (pam-eng)": 10.73, "Tatoeba (pes-eng)": 94.7, "Tatoeba (pms-eng)": 64.57, "Tatoeba (pol-eng)": 97.22, "Tatoeba (por-eng)": 94.14, "Tatoeba (ron-eng)": 96.92, "Tatoeba (rus-eng)": 93.75, "Tatoeba (slk-eng)": 96.5, "Tatoeba (slv-eng)": 96.03, "Tatoeba (spa-eng)": 98.4, "Tatoeba (sqi-eng)": 96.76, "Tatoeba (srp-eng)": 94.43, "Tatoeba (swe-eng)": 95.63, "Tatoeba (swg-eng)": 59.36, "Tatoeba (swh-eng)": 84.5, "Tatoeba (tam-eng)": 89.0, "Tatoeba (tat-eng)": 85.92, "Tatoeba (tel-eng)": 97.86, "Tatoeba (tgl-eng)": 96.02, "Tatoeba (tha-eng)": 96.14, "Tatoeba (tuk-eng)": 75.27, "Tatoeba (tur-eng)": 98.0, "Tatoeba (tzl-eng)": 58.88, "Tatoeba (uig-eng)": 92.4, "Tatoeba (ukr-eng)": 93.97, "Tatoeba (urd-eng)": 93.22, "Tatoeba (uzb-eng)": 84.23, "Tatoeba (vie-eng)": 97.2, "Tatoeba (war-eng)": 60.29, "Tatoeba (wuu-eng)": 90.18, "Tatoeba (xho-eng)": 91.55, "Tatoeba (yid-eng)": 88.79, "Tatoeba (yue-eng)": 89.58, "Tatoeba (zsm-eng)": 95.62 } ] }, "Classification": { "accuracy": [ { "Model": "LaBSE", "AllegroReviews (pol-Latn)": 34.86, "AllegroReviews": 34.89, "AmazonCounterfactualClassification (en-ext)": 76.09, "AmazonCounterfactualClassification (en)": 75.93, "AmazonCounterfactualClassification (deu-Latn)": 73.17, "AmazonCounterfactualClassification (jpn-Jpan)": 76.4, "AmazonCounterfactualClassification (de)": 73.17, "AmazonCounterfactualClassification (ja)": 76.42, "AmazonPolarityClassification": 68.95, "AmazonReviewsClassification (en)": 35.8, "AmazonReviewsClassification (deu-Latn)": 39.93, "AmazonReviewsClassification (spa-Latn)": 39.39, "AmazonReviewsClassification (fra-Latn)": 38.53, "AmazonReviewsClassification (jpn-Jpan)": 36.45, "AmazonReviewsClassification (cmn-Hans)": 36.45, "AmazonReviewsClassification (de)": 39.92, "AmazonReviewsClassification (es)": 39.39, "AmazonReviewsClassification (fr)": 38.52, "AmazonReviewsClassification (ja)": 36.44, "AmazonReviewsClassification (zh)": 36.45, "AngryTweetsClassification (dan-Latn)": 
51.11, "Banking77Classification": 69.85, "CBD (pol-Latn)": 65.74, "CBD": 65.71, "DanishPoliticalCommentsClassification (dan-Latn)": 38.34, "EmotionClassification": 37.22, "GeoreviewClassification (rus-Cyrl)": 40.86, "HeadlineClassification (rus-Cyrl)": 68.75, "IFlyTek (cmn-Hans)": 43.19, "ImdbClassification": 62.04, "InappropriatenessClassification (rus-Cyrl)": 58.52, "JDReview (cmn-Hans)": 79.14, "KinopoiskClassification (rus-Cyrl)": 46.77, "LccSentimentClassification (dan-Latn)": 50.07, "MTOPDomainClassification (en)": 86.06, "MTOPDomainClassification (deu-Latn)": 86.93, "MTOPDomainClassification (spa-Latn)": 84.06, "MTOPDomainClassification (fra-Latn)": 84.14, "MTOPDomainClassification (hin-Deva)": 85.11, "MTOPDomainClassification (tha-Thai)": 81.24, "MTOPDomainClassification (de)": 86.95, "MTOPDomainClassification (es)": 84.07, "MTOPDomainClassification (fr)": 84.14, "MTOPDomainClassification (hi)": 85.11, "MTOPDomainClassification (th)": 81.24, "MTOPIntentClassification (en)": 63.03, "MTOPIntentClassification (deu-Latn)": 63.46, "MTOPIntentClassification (spa-Latn)": 64.46, "MTOPIntentClassification (fra-Latn)": 62.05, "MTOPIntentClassification (hin-Deva)": 62.61, "MTOPIntentClassification (tha-Thai)": 64.7, "MTOPIntentClassification (de)": 63.42, "MTOPIntentClassification (es)": 64.44, "MTOPIntentClassification (fr)": 62.01, "MTOPIntentClassification (hi)": 62.58, "MTOPIntentClassification (th)": 64.61, "MasakhaNEWSClassification (amh-Ethi)": 81.78, "MasakhaNEWSClassification (eng)": 77.77, "MasakhaNEWSClassification (fra-Latn)": 72.09, "MasakhaNEWSClassification (hau-Latn)": 73.12, "MasakhaNEWSClassification (ibo-Latn)": 69.1, "MasakhaNEWSClassification (lin-Latn)": 74.63, "MasakhaNEWSClassification (lug-Latn)": 57.44, "MasakhaNEWSClassification (orm-Ethi)": 51.6, "MasakhaNEWSClassification (pcm-Latn)": 91.44, "MasakhaNEWSClassification (run-Latn)": 73.76, "MasakhaNEWSClassification (sna-Latn)": 87.18, "MasakhaNEWSClassification (som-Latn)": 60.03, "MasakhaNEWSClassification (swa-Latn)": 69.33, "MasakhaNEWSClassification (tir-Ethi)": 61.73, "MasakhaNEWSClassification (xho-Latn)": 77.34, "MasakhaNEWSClassification (yor-Latn)": 77.13, "MasakhaNEWSClassification (fra)": 77.39, "MassiveIntentClassification (isl-Latn)": 54.83, "MassiveIntentClassification (dan-Latn)": 58.23, "MassiveIntentClassification (khm-Khmr)": 48.46, "MassiveIntentClassification (lav-Latn)": 57.06, "MassiveIntentClassification (fas-Arab)": 62.33, "MassiveIntentClassification (afr-Latn)": 56.07, "MassiveIntentClassification (fra-Latn)": 60.42, "MassiveIntentClassification (tgl-Latn)": 55.1, "MassiveIntentClassification (tur-Latn)": 60.91, "MassiveIntentClassification (spa-Latn)": 58.26, "MassiveIntentClassification (ind-Latn)": 61.14, "MassiveIntentClassification (hin-Deva)": 59.37, "MassiveIntentClassification (hun-Latn)": 59.52, "MassiveIntentClassification (swa-Latn)": 51.5, "MassiveIntentClassification (cmo-Hant)": 59.47, "MassiveIntentClassification (deu-Latn)": 56.16, "MassiveIntentClassification (amh-Ethi)": 55.67, "MassiveIntentClassification (ara-Arab)": 50.78, "MassiveIntentClassification (hye-Armn)": 56.22, "MassiveIntentClassification (ita-Latn)": 59.75, "MassiveIntentClassification (jpn-Jpan)": 63.13, "MassiveIntentClassification (jav-Latn)": 50.94, "MassiveIntentClassification (slv-Latn)": 59.39, "MassiveIntentClassification (vie-Latn)": 56.68, "MassiveIntentClassification (kan-Knda)": 56.2, "MassiveIntentClassification (kor-Kore)": 60.93, "MassiveIntentClassification (nld-Latn)": 59.31, 
"MassiveIntentClassification (cym-Latn)": 50.17, "MassiveIntentClassification (ron-Latn)": 57.84, "MassiveIntentClassification (tha-Thai)": 56.45, "MassiveIntentClassification (kat-Geor)": 48.26, "MassiveIntentClassification (cmo-Hans)": 63.85, "MassiveIntentClassification (mya-Mymr)": 57.23, "MassiveIntentClassification (pol-Latn)": 59.75, "MassiveIntentClassification (msa-Latn)": 58.59, "MassiveIntentClassification (urd-Arab)": 56.68, "MassiveIntentClassification (tel-Telu)": 58.33, "MassiveIntentClassification (mal-Mlym)": 57.75, "MassiveIntentClassification (fin-Latn)": 60.09, "MassiveIntentClassification (aze-Latn)": 58.91, "MassiveIntentClassification (ben-Beng)": 58.14, "MassiveIntentClassification (en)": 61.46, "MassiveIntentClassification (mon-Cyrl)": 58.46, "MassiveIntentClassification (por-Latn)": 60.15, "MassiveIntentClassification (rus-Cyrl)": 60.64, "MassiveIntentClassification (ell-Grek)": 56.96, "MassiveIntentClassification (nob-Latn)": 57.81, "MassiveIntentClassification (heb-Hebr)": 56.42, "MassiveIntentClassification (swe-Latn)": 59.64, "MassiveIntentClassification (tam-Taml)": 54.95, "MassiveIntentClassification (sqi-Latn)": 58.08, "MassiveIntentClassification (af)": 56.12, "MassiveIntentClassification (am)": 55.71, "MassiveIntentClassification (ar)": 50.86, "MassiveIntentClassification (az)": 58.97, "MassiveIntentClassification (bn)": 58.22, "MassiveIntentClassification (cy)": 50.16, "MassiveIntentClassification (da)": 58.25, "MassiveIntentClassification (de)": 56.21, "MassiveIntentClassification (el)": 57.03, "MassiveIntentClassification (es)": 58.32, "MassiveIntentClassification (fa)": 62.33, "MassiveIntentClassification (fi)": 60.12, "MassiveIntentClassification (fr)": 60.47, "MassiveIntentClassification (he)": 56.55, "MassiveIntentClassification (hi)": 59.4, "MassiveIntentClassification (hu)": 59.52, "MassiveIntentClassification (hy)": 56.2, "MassiveIntentClassification (id)": 61.12, "MassiveIntentClassification (is)": 54.9, "MassiveIntentClassification (it)": 59.83, "MassiveIntentClassification (ja)": 63.11, "MassiveIntentClassification (jv)": 50.98, "MassiveIntentClassification (ka)": 48.35, "MassiveIntentClassification (km)": 48.55, "MassiveIntentClassification (kn)": 56.24, "MassiveIntentClassification (ko)": 60.99, "MassiveIntentClassification (lv)": 57.1, "MassiveIntentClassification (ml)": 57.91, "MassiveIntentClassification (mn)": 58.5, "MassiveIntentClassification (ms)": 58.6, "MassiveIntentClassification (my)": 57.35, "MassiveIntentClassification (nb)": 57.91, "MassiveIntentClassification (nl)": 59.37, "MassiveIntentClassification (pl)": 59.71, "MassiveIntentClassification (pt)": 60.16, "MassiveIntentClassification (ro)": 57.92, "MassiveIntentClassification (ru)": 60.67, "MassiveIntentClassification (sl)": 59.37, "MassiveIntentClassification (sq)": 58.03, "MassiveIntentClassification (sv)": 59.66, "MassiveIntentClassification (sw)": 51.62, "MassiveIntentClassification (ta)": 55.04, "MassiveIntentClassification (te)": 58.32, "MassiveIntentClassification (th)": 56.58, "MassiveIntentClassification (tl)": 55.28, "MassiveIntentClassification (tr)": 60.91, "MassiveIntentClassification (ur)": 56.7, "MassiveIntentClassification (vi)": 56.67, "MassiveIntentClassification (zh-CN)": 63.86, "MassiveIntentClassification (zh-TW)": 59.51, "MassiveScenarioClassification (ron-Latn)": 62.39, "MassiveScenarioClassification (vie-Latn)": 61.06, "MassiveScenarioClassification (nld-Latn)": 65.17, "MassiveScenarioClassification (urd-Arab)": 61.47, "MassiveScenarioClassification 
(tha-Thai)": 64.32, "MassiveScenarioClassification (tur-Latn)": 65.43, "MassiveScenarioClassification (deu-Latn)": 62.4, "MassiveScenarioClassification (fas-Arab)": 67.43, "MassiveScenarioClassification (kat-Geor)": 53.37, "MassiveScenarioClassification (khm-Khmr)": 56.2, "MassiveScenarioClassification (cmo-Hant)": 67.05, "MassiveScenarioClassification (tam-Taml)": 59.07, "MassiveScenarioClassification (isl-Latn)": 61.93, "MassiveScenarioClassification (amh-Ethi)": 62.02, "MassiveScenarioClassification (ara-Arab)": 57.74, "MassiveScenarioClassification (fin-Latn)": 64.56, "MassiveScenarioClassification (kor-Kore)": 67.3, "MassiveScenarioClassification (hin-Deva)": 64.41, "MassiveScenarioClassification (aze-Latn)": 63.47, "MassiveScenarioClassification (en)": 66.41, "MassiveScenarioClassification (hun-Latn)": 65.82, "MassiveScenarioClassification (slv-Latn)": 64.26, "MassiveScenarioClassification (ell-Grek)": 64.57, "MassiveScenarioClassification (nob-Latn)": 64.3, "MassiveScenarioClassification (hye-Armn)": 61.29, "MassiveScenarioClassification (dan-Latn)": 65.26, "MassiveScenarioClassification (jpn-Jpan)": 67.7, "MassiveScenarioClassification (sqi-Latn)": 64.56, "MassiveScenarioClassification (mon-Cyrl)": 62.62, "MassiveScenarioClassification (fra-Latn)": 65.1, "MassiveScenarioClassification (cym-Latn)": 56.12, "MassiveScenarioClassification (ind-Latn)": 65.86, "MassiveScenarioClassification (por-Latn)": 63.28, "MassiveScenarioClassification (ita-Latn)": 64.09, "MassiveScenarioClassification (swa-Latn)": 58.37, "MassiveScenarioClassification (kan-Knda)": 61.74, "MassiveScenarioClassification (mya-Mymr)": 62.94, "MassiveScenarioClassification (mal-Mlym)": 62.24, "MassiveScenarioClassification (rus-Cyrl)": 65.23, "MassiveScenarioClassification (tel-Telu)": 64.12, "MassiveScenarioClassification (swe-Latn)": 65.99, "MassiveScenarioClassification (cmo-Hans)": 70.84, "MassiveScenarioClassification (ben-Beng)": 61.86, "MassiveScenarioClassification (lav-Latn)": 61.86, "MassiveScenarioClassification (jav-Latn)": 58.29, "MassiveScenarioClassification (spa-Latn)": 63.61, "MassiveScenarioClassification (tgl-Latn)": 60.23, "MassiveScenarioClassification (heb-Hebr)": 63.52, "MassiveScenarioClassification (afr-Latn)": 63.38, "MassiveScenarioClassification (msa-Latn)": 65.62, "MassiveScenarioClassification (pol-Latn)": 64.55, "MassiveScenarioClassification (af)": 63.39, "MassiveScenarioClassification (am)": 62.02, "MassiveScenarioClassification (ar)": 57.72, "MassiveScenarioClassification (az)": 63.48, "MassiveScenarioClassification (bn)": 61.84, "MassiveScenarioClassification (cy)": 56.13, "MassiveScenarioClassification (da)": 65.24, "MassiveScenarioClassification (de)": 62.39, "MassiveScenarioClassification (el)": 64.58, "MassiveScenarioClassification (es)": 63.61, "MassiveScenarioClassification (fa)": 67.46, "MassiveScenarioClassification (fi)": 64.58, "MassiveScenarioClassification (fr)": 65.1, "MassiveScenarioClassification (he)": 63.53, "MassiveScenarioClassification (hi)": 64.4, "MassiveScenarioClassification (hu)": 65.82, "MassiveScenarioClassification (hy)": 61.25, "MassiveScenarioClassification (id)": 65.84, "MassiveScenarioClassification (is)": 61.94, "MassiveScenarioClassification (it)": 64.09, "MassiveScenarioClassification (ja)": 67.72, "MassiveScenarioClassification (jv)": 58.29, "MassiveScenarioClassification (ka)": 53.38, "MassiveScenarioClassification (km)": 56.18, "MassiveScenarioClassification (kn)": 61.74, "MassiveScenarioClassification (ko)": 67.26, "MassiveScenarioClassification 
(lv)": 61.87, "MassiveScenarioClassification (ml)": 62.26, "MassiveScenarioClassification (mn)": 62.6, "MassiveScenarioClassification (ms)": 65.63, "MassiveScenarioClassification (my)": 62.94, "MassiveScenarioClassification (nb)": 64.29, "MassiveScenarioClassification (nl)": 65.16, "MassiveScenarioClassification (pl)": 64.58, "MassiveScenarioClassification (pt)": 63.28, "MassiveScenarioClassification (ro)": 62.41, "MassiveScenarioClassification (ru)": 65.25, "MassiveScenarioClassification (sl)": 64.25, "MassiveScenarioClassification (sq)": 64.54, "MassiveScenarioClassification (sv)": 66.01, "MassiveScenarioClassification (sw)": 58.36, "MassiveScenarioClassification (ta)": 59.08, "MassiveScenarioClassification (te)": 64.13, "MassiveScenarioClassification (th)": 64.34, "MassiveScenarioClassification (tl)": 60.23, "MassiveScenarioClassification (tr)": 65.43, "MassiveScenarioClassification (ur)": 61.52, "MassiveScenarioClassification (vi)": 61.05, "MassiveScenarioClassification (zh-CN)": 70.85, "MassiveScenarioClassification (zh-TW)": 67.08, "MultilingualSentiment (cmn-Hans)": 64.6, "NoRecClassification (nob-Latn)": 45.45, "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 35.39, "OnlineShopping (cmn-Hans)": 85.63, "PAC (pol-Latn)": 68.09, "PAC": 68.11, "PolEmo2.0-IN (pol-Latn)": 63.91, "PolEmo2.0-IN": 64.0, "PolEmo2.0-OUT (pol-Latn)": 44.76, "PolEmo2.0-OUT": 44.72, "RuReviewsClassification (rus-Cyrl)": 58.01, "RuSciBenchGRNTIClassification (rus-Cyrl)": 53.04, "RuSciBenchOECDClassification (rus-Cyrl)": 40.48, "TNews (cmn-Hans)": 46.02, "ToxicConversationsClassification": 66.9, "TweetSentimentExtractionClassification": 58.82, "Waimai (cmn-Hans)": 82.85 } ] }, "Clustering": { "v_measure": [ { "Model": "LaBSE", "8TagsClustering": 12.96, "AlloProfClusteringP2P": 54.78, "AlloProfClusteringS2S": 31.6, "ArxivClusteringP2P": 32.13, "ArxivClusteringS2S": 22.05, "BiorxivClusteringP2P": 29.84, "BiorxivClusteringS2S": 20.57, "GeoreviewClusteringP2P (rus-Cyrl)": 52.19, "HALClusteringS2S": 20.62, "MLSUMClusteringP2P (rus-Cyrl)": 39.45, "MLSUMClusteringP2P": 42.09, "MLSUMClusteringS2S (rus-Cyrl)": 35.77, "MLSUMClusteringS2S": 34.84, "MasakhaNEWSClusteringP2P (amh-Ethi)": 67.78, "MasakhaNEWSClusteringP2P (eng)": 48.16, "MasakhaNEWSClusteringP2P (fra-Latn)": 46.16, "MasakhaNEWSClusteringP2P (hau-Latn)": 39.77, "MasakhaNEWSClusteringP2P (ibo-Latn)": 62.67, "MasakhaNEWSClusteringP2P (lin-Latn)": 62.98, "MasakhaNEWSClusteringP2P (lug-Latn)": 47.76, "MasakhaNEWSClusteringP2P (orm-Ethi)": 28.76, "MasakhaNEWSClusteringP2P (pcm-Latn)": 77.16, "MasakhaNEWSClusteringP2P (run-Latn)": 60.36, "MasakhaNEWSClusteringP2P (sna-Latn)": 63.57, "MasakhaNEWSClusteringP2P (som-Latn)": 34.94, "MasakhaNEWSClusteringP2P (swa-Latn)": 27.26, "MasakhaNEWSClusteringP2P (tir-Ethi)": 51.59, "MasakhaNEWSClusteringP2P (xho-Latn)": 45.32, "MasakhaNEWSClusteringP2P (yor-Latn)": 48.73, "MasakhaNEWSClusteringP2P (fra)": 46.16, "MasakhaNEWSClusteringS2S (amh-Ethi)": 52.73, "MasakhaNEWSClusteringS2S (eng)": 32.6, "MasakhaNEWSClusteringS2S (fra-Latn)": 38.13, "MasakhaNEWSClusteringS2S (hau-Latn)": 31.62, "MasakhaNEWSClusteringS2S (ibo-Latn)": 32.27, "MasakhaNEWSClusteringS2S (lin-Latn)": 49.38, "MasakhaNEWSClusteringS2S (lug-Latn)": 47.63, "MasakhaNEWSClusteringS2S (orm-Ethi)": 25.05, "MasakhaNEWSClusteringS2S (pcm-Latn)": 68.18, "MasakhaNEWSClusteringS2S (run-Latn)": 52.39, "MasakhaNEWSClusteringS2S (sna-Latn)": 46.9, "MasakhaNEWSClusteringS2S (som-Latn)": 24.08, "MasakhaNEWSClusteringS2S (swa-Latn)": 15.83, 
"MasakhaNEWSClusteringS2S (tir-Ethi)": 49.07, "MasakhaNEWSClusteringS2S (xho-Latn)": 28.52, "MasakhaNEWSClusteringS2S (yor-Latn)": 32.26, "MasakhaNEWSClusteringS2S (fra)": 38.13, "MedrxivClusteringP2P": 30.13, "MedrxivClusteringS2S": 24.82, "RedditClustering": 28.79, "RedditClusteringP2P": 49.14, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 49.09, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.97, "StackExchangeClustering": 35.43, "StackExchangeClusteringP2P": 28.83, "TwentyNewsgroupsClustering": 23.28 } ] }, "PairClassification": { "ap": [ { "Model": "LaBSE", "CDSC-E (pol-Latn)": 68.92, "CDSC-E": 68.91, "OpusparcusPC (deu-Latn)": 96.58, "OpusparcusPC (en)": 98.12, "OpusparcusPC (fin-Latn)": 94.44, "OpusparcusPC (fra-Latn)": 93.96, "OpusparcusPC (rus-Cyrl)": 87.3, "OpusparcusPC (swe-Latn)": 93.69, "OpusparcusPC (fr)": 93.96, "PPC": 86.97, "PSC (pol-Latn)": 97.42, "PSC": 97.42, "PawsXPairClassification (deu-Latn)": 51.07, "PawsXPairClassification (en)": 54.07, "PawsXPairClassification (spa-Latn)": 52.19, "PawsXPairClassification (fra-Latn)": 54.63, "PawsXPairClassification (jpn-Hira)": 47.56, "PawsXPairClassification (kor-Hang)": 49.39, "PawsXPairClassification (cmn-Hans)": 54.26, "PawsXPairClassification (fr)": 54.63, "SICK-E-PL (pol-Latn)": 63.77, "SICK-E-PL": 63.77, "SprintDuplicateQuestions": 89.26, "TERRa (rus-Cyrl)": 55.71, "TwitterSemEval2015": 62.78, "TwitterURLCorpus": 84.58 } ] }, "Reranking": { "map": [ { "Model": "LaBSE", "AlloprofReranking (fra-Latn)": 55.37, "AlloprofReranking": 49.51, "AskUbuntuDupQuestions": 52.75, "MMarcoReranking (cmn-Hans)": 14.83, "MindSmallReranking": 29.81, "RuBQReranking (rus-Cyrl)": 55.13, "SciDocsRR": 68.72, "StackOverflowDupQuestions": 42.42, "SyntecReranking (fra-Latn)": 67.62, "SyntecReranking": 73.28, "T2Reranking (cmn-Hans)": 63.29 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "LaBSE", "AILACasedocs": 17.67, "AILAStatutes": 16.72, "ARCChallenge": 3.78, "AlloprofRetrieval (fra-Latn)": 19.77, "AlloprofRetrieval": 19.77, "AlphaNLI": 13.11, "ArguAna": 34.18, "ArguAna-PL (pol-Latn)": 38.56, "ArguAna-PL": 38.52, "BSARDRetrieval (fra-Latn)": 4.44, "BSARDRetrieval": 0.0, "CQADupstackRetrieval": 18.75, "ClimateFEVER": 3.83, "CmedqaRetrieval (cmn-Hans)": 5.49, "CovidRetrieval (cmn-Hans)": 28.6, "DBPedia": 15.57, "DBPedia-PL": 16.1, "DuRetrieval (cmn-Hans)": 26.34, "EcomRetrieval (cmn-Hans)": 25.42, "FEVER": 12.18, "FiQA-PL (pol-Latn)": 7.66, "FiQA-PL": 7.63, "FiQA2018": 7.0, "GerDaLIRSmall (deu-Latn)": 4.59, "HellaSwag": 5.59, "HotpotQA": 18.75, "HotpotQA-PL": 19.72, "LEMBNarrativeQARetrieval": 11.45, "LEMBNeedleRetrieval": 17.5, "LEMBPasskeyRetrieval": 20.25, "LEMBQMSumRetrieval": 14.07, "LEMBSummScreenFDRetrieval": 40.52, "LEMBWikimQARetrieval": 28.1, "LeCaRDv2 (zho-Hans)": 24.68, "LegalBenchConsumerContractsQA": 54.66, "LegalBenchCorporateLobbying": 69.39, "LegalQuAD (deu-Latn)": 16.64, "LegalSummarization": 53.89, "MMarcoRetrieval (cmn-Hans)": 34.78, "MSMARCO": 7.6, "MSMARCO-PL": 7.22, "MedicalRetrieval (cmn-Hans)": 6.68, "MintakaRetrieval (ara-Arab)": 14.06, "MintakaRetrieval (deu-Latn)": 15.26, "MintakaRetrieval (spa-Latn)": 15.65, "MintakaRetrieval (fra-Latn)": 15.53, "MintakaRetrieval (hin-Deva)": 13.67, "MintakaRetrieval (ita-Latn)": 15.94, "MintakaRetrieval (jpn-Hira)": 12.8, "MintakaRetrieval (por-Latn)": 15.03, "MintakaRetrieval (fr)": 15.53, "NFCorpus": 16.54, "NFCorpus-PL (pol-Latn)": 17.45, "NFCorpus-PL": 17.45, "NQ": 8.42, "NQ-PL": 9.65, "PIQA": 6.53, "Quail": 1.91, "Quora-PL": 74.96, "QuoraRetrieval": 77.03, "RARbCode": 2.31, "RARbMath": 
27.19, "RiaNewsRetrieval (rus-Cyrl)": 42.75, "RuBQRetrieval (rus-Cyrl)": 30.02, "SCIDOCS": 5.63, "SCIDOCS-PL (pol-Latn)": 7.47, "SCIDOCS-PL": 7.48, "SIQA": 1.07, "SciFact": 38.2, "SciFact-PL (pol-Latn)": 39.79, "SciFact-PL": 39.79, "SpartQA": 1.56, "SyntecRetrieval (fra-Latn)": 55.31, "SyntecRetrieval": 55.31, "T2Retrieval (cmn-Hans)": 25.32, "TRECCOVID": 16.34, "TRECCOVID-PL (pol-Latn)": 18.51, "TRECCOVID-PL": 18.45, "TempReasonL1": 1.56, "TempReasonL2Fact": 7.06, "TempReasonL2Pure": 0.14, "TempReasonL3Fact": 8.74, "TempReasonL3Pure": 4.73, "Touche2020": 4.88, "VideoRetrieval (cmn-Hans)": 22.04, "WinoGrande": 54.3, "XPQARetrieval (ara-Arab_ara-Arab)": 35.19, "XPQARetrieval (eng-Latn_ara-Arab)": 20.64, "XPQARetrieval (ara-Arab_eng-Latn)": 32.47, "XPQARetrieval (deu-Latn_deu-Latn)": 53.56, "XPQARetrieval (eng-Latn_deu-Latn)": 24.31, "XPQARetrieval (deu-Latn_eng-Latn)": 54.87, "XPQARetrieval (spa-Latn_spa-Latn)": 44.49, "XPQARetrieval (eng-Latn_spa-Latn)": 25.31, "XPQARetrieval (spa-Latn_eng-Latn)": 43.4, "XPQARetrieval (fra-Latn_fra-Latn)": 51.74, "XPQARetrieval (eng-Latn_fra-Latn)": 21.29, "XPQARetrieval (fra-Latn_eng-Latn)": 49.4, "XPQARetrieval (hin-Deva_hin-Deva)": 66.64, "XPQARetrieval (eng-Latn_hin-Deva)": 23.25, "XPQARetrieval (hin-Deva_eng-Latn)": 64.54, "XPQARetrieval (ita-Latn_ita-Latn)": 56.27, "XPQARetrieval (eng-Latn_ita-Latn)": 25.8, "XPQARetrieval (ita-Latn_eng-Latn)": 52.69, "XPQARetrieval (jpn-Hira_jpn-Hira)": 58.6, "XPQARetrieval (eng-Latn_jpn-Hira)": 21.49, "XPQARetrieval (jpn-Hira_eng-Latn)": 52.41, "XPQARetrieval (kor-Hang_kor-Hang)": 27.66, "XPQARetrieval (eng-Latn_kor-Hang)": 23.33, "XPQARetrieval (kor-Hang_eng-Latn)": 23.96, "XPQARetrieval (pol-Latn_pol-Latn)": 37.33, "XPQARetrieval (eng-Latn_pol-Latn)": 16.19, "XPQARetrieval (pol-Latn_eng-Latn)": 37.7, "XPQARetrieval (por-Latn_por-Latn)": 38.49, "XPQARetrieval (eng-Latn_por-Latn)": 19.41, "XPQARetrieval (por-Latn_eng-Latn)": 37.33, "XPQARetrieval (tam-Taml_tam-Taml)": 37.32, "XPQARetrieval (eng-Latn_tam-Taml)": 20.53, "XPQARetrieval (tam-Taml_eng-Latn)": 30.14, "XPQARetrieval (cmn-Hans_cmn-Hans)": 50.7, "XPQARetrieval (eng-Latn_cmn-Hans)": 20.59, "XPQARetrieval (cmn-Hans_eng-Latn)": 48.23, "XPQARetrieval (fr)": 51.74 } ] }, "STS": { "spearman": [ { "Model": "LaBSE", "AFQMC (cmn-Hans)": 21.02, "ATEC (cmn-Hans)": 26.61, "BIOSSES": 78.7, "BQ (cmn-Hans)": 42.6, "CDSC-R (pol-Latn)": 85.53, "CDSC-R": 85.53, "LCQMC (cmn-Hans)": 52.19, "PAWSX (cmn-Hans)": 10.23, "RUParaPhraserSTS (rus-Cyrl)": 65.74, "RuSTSBenchmarkSTS (rus-Cyrl)": 73.34, "SICK-R": 69.99, "SICK-R-PL (pol-Latn)": 65.9, "SICK-R-PL": 65.9, "SICKFr (fra-Latn)": 69.94, "SICKFr": 69.94, "STS12": 65.08, "STS13": 67.98, "STS14": 64.03, "STS15": 76.59, "STS16": 72.98, "STS17 (nld-Latn_eng-Latn)": 75.22, "STS17 (eng-Latn_tur-Latn)": 72.07, "STS17 (spa-Latn)": 80.83, "STS17 (kor-Hang)": 71.32, "STS17 (eng-Latn_deu-Latn)": 73.85, "STS17 (ita-Latn_eng-Latn)": 76.99, "STS17 (eng-Latn_ara-Arab)": 74.51, "STS17 (ara-Arab)": 69.07, "STS17 (fra-Latn_eng-Latn)": 76.98, "STS17 (spa-Latn_eng-Latn)": 65.71, "STS17 (en-en)": 79.45, "STS17 (ar-ar)": 69.07, "STS17 (en-ar)": 74.51, "STS17 (en-de)": 73.85, "STS17 (en-tr)": 72.07, "STS17 (es-en)": 65.71, "STS17 (es-es)": 80.83, "STS17 (fr-en)": 76.98, "STS17 (it-en)": 76.99, "STS17 (ko-ko)": 71.32, "STS17 (nl-en)": 75.22, "STS22 (cmn-Hans)": 63.02, "STS22 (spa-Latn)": 63.18, "STS22 (en)": 60.97, "STS22 (spa-Latn_ita-Latn)": 69.69, "STS22 (deu-Latn)": 48.58, "STS22 (fra-Latn)": 77.95, "STS22 (ara-Arab)": 57.67, "STS22 
(spa-Latn_eng-Latn)": 71.86, "STS22 (pol-Latn_eng-Latn)": 69.41, "STS22 (ita-Latn)": 72.22, "STS22 (pol-Latn)": 39.3, "STS22 (deu-Latn_fra-Latn)": 53.28, "STS22 (deu-Latn_pol-Latn)": 58.69, "STS22 (fra-Latn_pol-Latn)": 61.98, "STS22 (cmn-Hans_eng-Latn)": 64.02, "STS22 (tur-Latn)": 58.15, "STS22 (deu-Latn_eng-Latn)": 50.14, "STS22 (rus-Cyrl)": 57.49, "STS22 (ar)": 57.67, "STS22 (de)": 48.58, "STS22 (de-en)": 50.14, "STS22 (de-fr)": 53.28, "STS22 (de-pl)": 58.69, "STS22 (es)": 63.18, "STS22 (es-en)": 71.86, "STS22 (es-it)": 69.69, "STS22 (fr)": 77.95, "STS22 (fr-pl)": 61.98, "STS22 (it)": 72.22, "STS22 (pl)": 39.28, "STS22 (pl-en)": 69.41, "STS22 (ru)": 57.49, "STS22 (tr)": 58.15, "STS22 (zh)": 63.02, "STS22 (zh-en)": 64.02, "STSB (cmn-Hans)": 68.38, "STSBenchmark": 72.25, "STSBenchmarkMultilingualSTS (en)": 72.25, "STSBenchmarkMultilingualSTS (rus-Cyrl)": 73.06, "STSBenchmarkMultilingualSTS (fra-Latn)": 75.1, "STSBenchmarkMultilingualSTS (spa-Latn)": 72.92, "STSBenchmarkMultilingualSTS (nld-Latn)": 70.22, "STSBenchmarkMultilingualSTS (cmn-Hans)": 69.5, "STSBenchmarkMultilingualSTS (ita-Latn)": 72.97, "STSBenchmarkMultilingualSTS (por-Latn)": 71.65, "STSBenchmarkMultilingualSTS (deu-Latn)": 72.43, "STSBenchmarkMultilingualSTS (pol-Latn)": 72.58, "STSBenchmarkMultilingualSTS (fr)": 75.1 } ] }, "Summarization": { "spearman": [ { "Model": "LaBSE", "SummEval": 31.05, "SummEvalFr (fra-Latn)": 30.16, "SummEvalFr": 30.16 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "LaBSE" } ] } }, "sentence-camembert-large": { "BitextMining": { "f1": [ { "Model": "sentence-camembert-large" } ] }, "Classification": { "accuracy": [ { "Model": "sentence-camembert-large", "AmazonReviewsClassification (fr)": 37.97, "MTOPDomainClassification (fr)": 85.74, "MTOPIntentClassification (fr)": 58.62, "MasakhaNEWSClassification (fra)": 80.62, "MassiveIntentClassification (fr)": 62.65, "MassiveScenarioClassification (fr)": 69.29 } ] }, "Clustering": { "v_measure": [ { "Model": "sentence-camembert-large", "AlloProfClusteringP2P": 62.69, "AlloProfClusteringS2S": 42.06, "HALClusteringS2S": 23.9, "MLSUMClusteringP2P": 42.04, "MLSUMClusteringS2S": 32.29, "MasakhaNEWSClusteringP2P (fra)": 54.51, "MasakhaNEWSClusteringS2S (fra)": 44.73 } ] }, "PairClassification": { "ap": [ { "Model": "sentence-camembert-large", "OpusparcusPC (fr)": 94.63, "PawsXPairClassification (fr)": 59.59 } ] }, "Reranking": { "map": [ { "Model": "sentence-camembert-large", "AlloprofReranking": 57.62, "SyntecReranking": 88.15 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "sentence-camembert-large", "AlloprofRetrieval": 31.62, "BSARDRetrieval": 0.0, "MintakaRetrieval (fr)": 21.87, "SyntecRetrieval": 81.11, "XPQARetrieval (fr)": 65.62 } ] }, "STS": { "spearman": [ { "Model": "sentence-camembert-large", "SICKFr": 77.7, "STS22 (fr)": 81.73, "STSBenchmarkMultilingualSTS (fr)": 85.79 } ] }, "Summarization": { "spearman": [ { "Model": "sentence-camembert-large", "SummEvalFr": 30.88 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "sentence-camembert-large" } ] } }, "nb-bert-large": { "BitextMining": { "f1": [ { "Model": "nb-bert-large", "BornholmBitextMining": 4.53 } ] }, "Classification": { "accuracy": [ { "Model": "nb-bert-large", "AngryTweetsClassification": 52.14, "DKHateClassification": 62.13, "DanishPoliticalCommentsClassification": 35.04, "LccSentimentClassification": 56.27, "MassiveIntentClassification (da)": 57.03, "MassiveIntentClassification (nb)": 62.68, "MassiveIntentClassification (sv)": 55.02, "MassiveScenarioClassification (da)": 
60.43, "MassiveScenarioClassification (nb)": 67.44, "MassiveScenarioClassification (sv)": 57.12, "NoRecClassification": 55.46, "NordicLangClassification": 85.27, "NorwegianParliament": 62.58, "ScalaDaClassification": 62.85, "ScalaNbClassification": 66.97 } ] }, "Clustering": { "v_measure": [ { "Model": "nb-bert-large" } ] }, "PairClassification": { "ap": [ { "Model": "nb-bert-large" } ] }, "Reranking": { "map": [ { "Model": "nb-bert-large" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "nb-bert-large" } ] }, "STS": { "spearman": [ { "Model": "nb-bert-large" } ] }, "Summarization": { "spearman": [ { "Model": "nb-bert-large" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "nb-bert-large" } ] } }, "bge-small-zh-v1.5": { "BitextMining": { "f1": [ { "Model": "bge-small-zh-v1.5" } ] }, "Classification": { "accuracy": [ { "Model": "bge-small-zh-v1.5", "AmazonReviewsClassification (zh)": 35.91, "IFlyTek": 45.49, "JDReview": 80.04, "MassiveIntentClassification (zh-CN)": 63.95, "MassiveScenarioClassification (zh-CN)": 70.8, "MultilingualSentiment": 63.06, "OnlineShopping": 85.05, "TNews": 48.15, "Waimai": 83.18 } ] }, "Clustering": { "v_measure": [ { "Model": "bge-small-zh-v1.5", "CLSClusteringP2P": 38.14, "CLSClusteringS2S": 35.14, "ThuNewsClusteringP2P": 54.22, "ThuNewsClusteringS2S": 49.22 } ] }, "PairClassification": { "ap": [ { "Model": "bge-small-zh-v1.5", "Cmnli": 76.24, "Ocnli": 64.57 } ] }, "Reranking": { "map": [ { "Model": "bge-small-zh-v1.5", "CMedQAv1": 77.4, "CMedQAv2": 79.86, "MMarcoReranking": 20.5, "T2Reranking": 65.9 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "bge-small-zh-v1.5", "CmedqaRetrieval": 35.11, "CovidRetrieval": 70.14, "DuRetrieval": 77.28, "EcomRetrieval": 55.71, "MMarcoRetrieval": 63.48, "MedicalRetrieval": 49.8, "T2Retrieval": 76.43, "VideoRetrieval": 66.19 } ] }, "STS": { "spearman": [ { "Model": "bge-small-zh-v1.5", "AFQMC": 33.42, "ATEC": 43.01, "BQ": 55.22, "LCQMC": 72.19, "PAWSX": 9.26, "QBQTC": 35.29, "STS22 (zh)": 67.72, "STSB": 76.73 } ] }, "Summarization": { "spearman": [ { "Model": "bge-small-zh-v1.5" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "bge-small-zh-v1.5" } ] } }, "bm25": { "BitextMining": { "f1": [ { "Model": "bm25" } ] }, "Classification": { "accuracy": [ { "Model": "bm25" } ] }, "Clustering": { "v_measure": [ { "Model": "bm25" } ] }, "PairClassification": { "ap": [ { "Model": "bm25" } ] }, "Reranking": { "map": [ { "Model": "bm25" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "bm25", "BrightRetrieval (robotics)": 13.53, "BrightRetrieval (pony)": 7.93, "BrightRetrieval (leetcode)": 24.37, "BrightRetrieval (earth_science)": 27.06, "BrightRetrieval (stackoverflow)": 16.55, "BrightRetrieval (economics)": 14.87, "BrightRetrieval (theoremqa_questions)": 9.78, "BrightRetrieval (theoremqa_theorems)": 4.25, "BrightRetrieval (psychology)": 12.51, "BrightRetrieval (sustainable_living)": 15.22, "BrightRetrieval (biology)": 19.19, "BrightRetrieval (aops)": 6.2 } ] }, "STS": { "spearman": [ { "Model": "bm25" } ] }, "Summarization": { "spearman": [ { "Model": "bm25" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "bm25", "Core17InstructionRetrieval": -1.06, "News21InstructionRetrieval": -2.15, "Robust04InstructionRetrieval": -3.06 } ] } }, "paraphrase-multilingual-MiniLM-L12-v2": { "BitextMining": { "f1": [ { "Model": "paraphrase-multilingual-MiniLM-L12-v2", "BUCC (de-en)": 97.11, "BUCC (fr-en)": 94.99, "BUCC (ru-en)": 95.06, "BUCC (zh-en)": 95.63, "BornholmBitextMining (dan-Latn)": 19.67, "Tatoeba (nds-Latn_eng-Latn)": 
32.16, "Tatoeba (dtp-Latn_eng-Latn)": 5.69, "Tatoeba (fry-Latn_eng-Latn)": 31.13, "Tatoeba (rus-Cyrl_eng-Latn)": 91.87, "Tatoeba (cmn-Hans_eng-Latn)": 94.93, "Tatoeba (afr-Latn_eng-Latn)": 58.22, "Tatoeba (ast-Latn_eng-Latn)": 62.17, "Tatoeba (kab-Latn_eng-Latn)": 1.16, "Tatoeba (cor-Latn_eng-Latn)": 3.42, "Tatoeba (est-Latn_eng-Latn)": 97.33, "Tatoeba (cym-Latn_eng-Latn)": 13.25, "Tatoeba (dsb-Latn_eng-Latn)": 33.43, "Tatoeba (oci-Latn_eng-Latn)": 38.57, "Tatoeba (zsm-Latn_eng-Latn)": 95.31, "Tatoeba (yid-Hebr_eng-Latn)": 14.38, "Tatoeba (bel-Cyrl_eng-Latn)": 67.73, "Tatoeba (gle-Latn_eng-Latn)": 11.62, "Tatoeba (slv-Latn_eng-Latn)": 96.92, "Tatoeba (lvs-Latn_eng-Latn)": 97.87, "Tatoeba (orv-Cyrl_eng-Latn)": 15.1, "Tatoeba (bul-Cyrl_eng-Latn)": 92.65, "Tatoeba (tgl-Latn_eng-Latn)": 13.09, "Tatoeba (ind-Latn_eng-Latn)": 92.74, "Tatoeba (mon-Cyrl_eng-Latn)": 95.04, "Tatoeba (fao-Latn_eng-Latn)": 27.51, "Tatoeba (fin-Latn_eng-Latn)": 93.1, "Tatoeba (srp-Cyrl_eng-Latn)": 92.24, "Tatoeba (bos-Latn_eng-Latn)": 93.27, "Tatoeba (kor-Hang_eng-Latn)": 92.52, "Tatoeba (cat-Latn_eng-Latn)": 94.42, "Tatoeba (por-Latn_eng-Latn)": 92.13, "Tatoeba (spa-Latn_eng-Latn)": 95.42, "Tatoeba (ukr-Cyrl_eng-Latn)": 92.82, "Tatoeba (war-Latn_eng-Latn)": 7.25, "Tatoeba (hsb-Latn_eng-Latn)": 36.1, "Tatoeba (dan-Latn_eng-Latn)": 94.8, "Tatoeba (nov-Latn_eng-Latn)": 47.99, "Tatoeba (kat-Geor_eng-Latn)": 95.44, "Tatoeba (gla-Latn_eng-Latn)": 3.61, "Tatoeba (ron-Latn_eng-Latn)": 95.3, "Tatoeba (glg-Latn_eng-Latn)": 94.0, "Tatoeba (vie-Latn_eng-Latn)": 95.12, "Tatoeba (pol-Latn_eng-Latn)": 94.28, "Tatoeba (hrv-Latn_eng-Latn)": 95.98, "Tatoeba (fra-Latn_eng-Latn)": 91.72, "Tatoeba (hye-Armn_eng-Latn)": 93.28, "Tatoeba (ile-Latn_eng-Latn)": 57.71, "Tatoeba (arz-Arab_eng-Latn)": 51.26, "Tatoeba (nob-Latn_eng-Latn)": 97.73, "Tatoeba (amh-Ethi_eng-Latn)": 36.21, "Tatoeba (nld-Latn_eng-Latn)": 94.58, "Tatoeba (swg-Latn_eng-Latn)": 26.31, "Tatoeba (cha-Latn_eng-Latn)": 15.98, "Tatoeba (nno-Latn_eng-Latn)": 76.34, "Tatoeba (mal-Mlym_eng-Latn)": 32.2, "Tatoeba (urd-Arab_eng-Latn)": 94.57, "Tatoeba (uzb-Latn_eng-Latn)": 17.14, "Tatoeba (swe-Latn_eng-Latn)": 94.42, "Tatoeba (wuu-Hans_eng-Latn)": 76.0, "Tatoeba (ceb-Latn_eng-Latn)": 8.05, "Tatoeba (hin-Deva_eng-Latn)": 97.62, "Tatoeba (ces-Latn_eng-Latn)": 95.12, "Tatoeba (arq-Arab_eng-Latn)": 18.6, "Tatoeba (jav-Latn_eng-Latn)": 17.04, "Tatoeba (swh-Latn_eng-Latn)": 14.48, "Tatoeba (kzj-Latn_eng-Latn)": 6.24, "Tatoeba (jpn-Jpan_eng-Latn)": 90.41, "Tatoeba (xho-Latn_eng-Latn)": 4.52, "Tatoeba (csb-Latn_eng-Latn)": 21.56, "Tatoeba (max-Deva_eng-Latn)": 45.25, "Tatoeba (ben-Beng_eng-Latn)": 36.48, "Tatoeba (ara-Arab_eng-Latn)": 87.93, "Tatoeba (kur-Latn_eng-Latn)": 46.94, "Tatoeba (lit-Latn_eng-Latn)": 93.16, "Tatoeba (isl-Latn_eng-Latn)": 24.07, "Tatoeba (cbk-Latn_eng-Latn)": 55.37, "Tatoeba (uig-Arab_eng-Latn)": 24.39, "Tatoeba (mhr-Cyrl_eng-Latn)": 6.89, "Tatoeba (slk-Latn_eng-Latn)": 95.15, "Tatoeba (tha-Thai_eng-Latn)": 96.72, "Tatoeba (ell-Grek_eng-Latn)": 95.43, "Tatoeba (pam-Latn_eng-Latn)": 5.41, "Tatoeba (pes-Arab_eng-Latn)": 92.59, "Tatoeba (yue-Hant_eng-Latn)": 71.45, "Tatoeba (tur-Latn_eng-Latn)": 95.08, "Tatoeba (tel-Telu_eng-Latn)": 36.4, "Tatoeba (eus-Latn_eng-Latn)": 23.18, "Tatoeba (ina-Latn_eng-Latn)": 79.13, "Tatoeba (aze-Latn_eng-Latn)": 62.1, "Tatoeba (lfn-Latn_eng-Latn)": 47.02, "Tatoeba (heb-Hebr_eng-Latn)": 86.88, "Tatoeba (mar-Deva_eng-Latn)": 92.38, "Tatoeba (sqi-Latn_eng-Latn)": 98.17, "Tatoeba (tat-Cyrl_eng-Latn)": 10.25, "Tatoeba (lat-Latn_eng-Latn)": 
19.47, "Tatoeba (tzl-Latn_eng-Latn)": 25.46, "Tatoeba (tuk-Latn_eng-Latn)": 15.16, "Tatoeba (ang-Latn_eng-Latn)": 10.24, "Tatoeba (bre-Latn_eng-Latn)": 5.56, "Tatoeba (ber-Tfng_eng-Latn)": 4.43, "Tatoeba (gsw-Latn_eng-Latn)": 25.74, "Tatoeba (ita-Latn_eng-Latn)": 93.05, "Tatoeba (awa-Deva_eng-Latn)": 33.43, "Tatoeba (tam-Taml_eng-Latn)": 24.64, "Tatoeba (mkd-Cyrl_eng-Latn)": 91.0, "Tatoeba (hun-Latn_eng-Latn)": 91.58, "Tatoeba (pms-Latn_eng-Latn)": 30.7, "Tatoeba (epo-Latn_eng-Latn)": 41.73, "Tatoeba (ido-Latn_eng-Latn)": 40.25, "Tatoeba (khm-Khmr_eng-Latn)": 32.11, "Tatoeba (kaz-Cyrl_eng-Latn)": 34.89, "Tatoeba (deu-Latn_eng-Latn)": 97.02, "Tatoeba (afr-eng)": 58.22, "Tatoeba (amh-eng)": 36.21, "Tatoeba (ang-eng)": 10.24, "Tatoeba (ara-eng)": 87.93, "Tatoeba (arq-eng)": 18.6, "Tatoeba (arz-eng)": 51.26, "Tatoeba (ast-eng)": 62.17, "Tatoeba (awa-eng)": 33.43, "Tatoeba (aze-eng)": 62.1, "Tatoeba (bel-eng)": 67.73, "Tatoeba (ben-eng)": 36.48, "Tatoeba (ber-eng)": 4.43, "Tatoeba (bos-eng)": 93.27, "Tatoeba (bre-eng)": 5.56, "Tatoeba (bul-eng)": 92.65, "Tatoeba (cat-eng)": 94.42, "Tatoeba (cbk-eng)": 55.37, "Tatoeba (ceb-eng)": 8.05, "Tatoeba (ces-eng)": 95.12, "Tatoeba (cha-eng)": 15.98, "Tatoeba (cmn-eng)": 94.93, "Tatoeba (cor-eng)": 3.42, "Tatoeba (csb-eng)": 21.56, "Tatoeba (cym-eng)": 13.25, "Tatoeba (dan-eng)": 94.8, "Tatoeba (deu-eng)": 97.02, "Tatoeba (dsb-eng)": 33.43, "Tatoeba (dtp-eng)": 5.69, "Tatoeba (ell-eng)": 95.43, "Tatoeba (epo-eng)": 41.73, "Tatoeba (est-eng)": 97.33, "Tatoeba (eus-eng)": 23.18, "Tatoeba (fao-eng)": 27.51, "Tatoeba (fin-eng)": 93.1, "Tatoeba (fra-eng)": 91.72, "Tatoeba (fry-eng)": 31.13, "Tatoeba (gla-eng)": 3.61, "Tatoeba (gle-eng)": 11.62, "Tatoeba (glg-eng)": 94.0, "Tatoeba (gsw-eng)": 25.74, "Tatoeba (heb-eng)": 86.88, "Tatoeba (hin-eng)": 97.62, "Tatoeba (hrv-eng)": 95.98, "Tatoeba (hsb-eng)": 36.1, "Tatoeba (hun-eng)": 91.58, "Tatoeba (hye-eng)": 93.28, "Tatoeba (ido-eng)": 40.25, "Tatoeba (ile-eng)": 57.71, "Tatoeba (ina-eng)": 79.13, "Tatoeba (ind-eng)": 92.74, "Tatoeba (isl-eng)": 24.07, "Tatoeba (ita-eng)": 93.05, "Tatoeba (jav-eng)": 17.04, "Tatoeba (jpn-eng)": 90.41, "Tatoeba (kab-eng)": 1.16, "Tatoeba (kat-eng)": 95.44, "Tatoeba (kaz-eng)": 34.89, "Tatoeba (khm-eng)": 32.11, "Tatoeba (kor-eng)": 92.52, "Tatoeba (kur-eng)": 46.94, "Tatoeba (kzj-eng)": 6.24, "Tatoeba (lat-eng)": 19.47, "Tatoeba (lfn-eng)": 47.02, "Tatoeba (lit-eng)": 93.16, "Tatoeba (lvs-eng)": 97.87, "Tatoeba (mal-eng)": 32.2, "Tatoeba (mar-eng)": 92.38, "Tatoeba (max-eng)": 45.25, "Tatoeba (mhr-eng)": 6.89, "Tatoeba (mkd-eng)": 91.0, "Tatoeba (mon-eng)": 95.04, "Tatoeba (nds-eng)": 32.16, "Tatoeba (nld-eng)": 94.58, "Tatoeba (nno-eng)": 76.34, "Tatoeba (nob-eng)": 97.73, "Tatoeba (nov-eng)": 47.99, "Tatoeba (oci-eng)": 38.57, "Tatoeba (orv-eng)": 15.1, "Tatoeba (pam-eng)": 5.41, "Tatoeba (pes-eng)": 92.59, "Tatoeba (pms-eng)": 30.7, "Tatoeba (pol-eng)": 94.28, "Tatoeba (por-eng)": 92.13, "Tatoeba (ron-eng)": 95.3, "Tatoeba (rus-eng)": 91.87, "Tatoeba (slk-eng)": 95.15, "Tatoeba (slv-eng)": 96.92, "Tatoeba (spa-eng)": 95.42, "Tatoeba (sqi-eng)": 98.17, "Tatoeba (srp-eng)": 92.24, "Tatoeba (swe-eng)": 94.42, "Tatoeba (swg-eng)": 26.31, "Tatoeba (swh-eng)": 14.48, "Tatoeba (tam-eng)": 24.64, "Tatoeba (tat-eng)": 10.25, "Tatoeba (tel-eng)": 36.4, "Tatoeba (tgl-eng)": 13.09, "Tatoeba (tha-eng)": 96.72, "Tatoeba (tuk-eng)": 15.16, "Tatoeba (tur-eng)": 95.08, "Tatoeba (tzl-eng)": 25.46, "Tatoeba (uig-eng)": 24.39, "Tatoeba (ukr-eng)": 92.82, "Tatoeba (urd-eng)": 94.57, "Tatoeba 
(uzb-eng)": 17.14, "Tatoeba (vie-eng)": 95.12, "Tatoeba (war-eng)": 7.25, "Tatoeba (wuu-eng)": 76.0, "Tatoeba (xho-eng)": 4.52, "Tatoeba (yid-eng)": 14.38, "Tatoeba (yue-eng)": 71.45, "Tatoeba (zsm-eng)": 95.31 } ] }, "Classification": { "accuracy": [ { "Model": "paraphrase-multilingual-MiniLM-L12-v2", "AllegroReviews (pol-Latn)": 30.85, "AllegroReviews": 30.88, "AmazonCounterfactualClassification (en-ext)": 69.99, "AmazonCounterfactualClassification (en)": 71.57, "AmazonCounterfactualClassification (deu-Latn)": 68.36, "AmazonCounterfactualClassification (jpn-Jpan)": 63.37, "AmazonCounterfactualClassification (de)": 68.35, "AmazonCounterfactualClassification (ja)": 63.45, "AmazonPolarityClassification": 69.21, "AmazonReviewsClassification (en)": 35.11, "AmazonReviewsClassification (deu-Latn)": 35.91, "AmazonReviewsClassification (spa-Latn)": 37.49, "AmazonReviewsClassification (fra-Latn)": 35.29, "AmazonReviewsClassification (jpn-Jpan)": 33.21, "AmazonReviewsClassification (cmn-Hans)": 35.24, "AmazonReviewsClassification (de)": 35.91, "AmazonReviewsClassification (es)": 37.49, "AmazonReviewsClassification (fr)": 35.3, "AmazonReviewsClassification (ja)": 33.24, "AmazonReviewsClassification (zh)": 35.26, "AngryTweetsClassification (dan-Latn)": 50.9, "Banking77Classification": 79.77, "CBD (pol-Latn)": 57.71, "CBD": 57.68, "DanishPoliticalCommentsClassification (dan-Latn)": 37.58, "EmotionClassification": 42.37, "GeoreviewClassification (rus-Cyrl)": 38.24, "HeadlineClassification (rus-Cyrl)": 68.3, "IFlyTek (cmn-Hans)": 39.88, "ImdbClassification": 60.46, "InappropriatenessClassification (rus-Cyrl)": 58.18, "JDReview (cmn-Hans)": 70.26, "KinopoiskClassification (rus-Cyrl)": 41.45, "LccSentimentClassification (dan-Latn)": 54.53, "MTOPDomainClassification (en)": 87.06, "MTOPDomainClassification (deu-Latn)": 79.21, "MTOPDomainClassification (spa-Latn)": 83.06, "MTOPDomainClassification (fra-Latn)": 78.64, "MTOPDomainClassification (hin-Deva)": 81.36, "MTOPDomainClassification (tha-Thai)": 79.97, "MTOPDomainClassification (de)": 79.2, "MTOPDomainClassification (es)": 83.04, "MTOPDomainClassification (fr)": 78.63, "MTOPDomainClassification (hi)": 81.36, "MTOPDomainClassification (th)": 79.99, "MTOPIntentClassification (en)": 65.52, "MTOPIntentClassification (deu-Latn)": 54.21, "MTOPIntentClassification (spa-Latn)": 60.3, "MTOPIntentClassification (fra-Latn)": 54.01, "MTOPIntentClassification (hin-Deva)": 59.92, "MTOPIntentClassification (tha-Thai)": 61.97, "MTOPIntentClassification (de)": 54.23, "MTOPIntentClassification (es)": 60.28, "MTOPIntentClassification (fr)": 54.05, "MTOPIntentClassification (hi)": 59.9, "MTOPIntentClassification (th)": 61.96, "MasakhaNEWSClassification (amh-Ethi)": 64.28, "MasakhaNEWSClassification (eng)": 74.7, "MasakhaNEWSClassification (fra-Latn)": 71.68, "MasakhaNEWSClassification (hau-Latn)": 47.96, "MasakhaNEWSClassification (ibo-Latn)": 42.46, "MasakhaNEWSClassification (lin-Latn)": 59.26, "MasakhaNEWSClassification (lug-Latn)": 42.29, "MasakhaNEWSClassification (orm-Ethi)": 34.98, "MasakhaNEWSClassification (pcm-Latn)": 89.54, "MasakhaNEWSClassification (run-Latn)": 47.2, "MasakhaNEWSClassification (sna-Latn)": 57.56, "MasakhaNEWSClassification (som-Latn)": 34.8, "MasakhaNEWSClassification (swa-Latn)": 46.05, "MasakhaNEWSClassification (tir-Ethi)": 27.94, "MasakhaNEWSClassification (xho-Latn)": 44.81, "MasakhaNEWSClassification (yor-Latn)": 52.92, "MasakhaNEWSClassification (fra)": 76.09, "MassiveIntentClassification (en)": 66.89, "MassiveIntentClassification 
(kat-Geor)": 43.03, "MassiveIntentClassification (vie-Latn)": 56.62, "MassiveIntentClassification (tur-Latn)": 59.91, "MassiveIntentClassification (deu-Latn)": 50.71, "MassiveIntentClassification (isl-Latn)": 30.87, "MassiveIntentClassification (tam-Taml)": 36.82, "MassiveIntentClassification (kan-Knda)": 41.0, "MassiveIntentClassification (mon-Cyrl)": 51.77, "MassiveIntentClassification (pol-Latn)": 59.48, "MassiveIntentClassification (spa-Latn)": 59.7, "MassiveIntentClassification (ben-Beng)": 35.38, "MassiveIntentClassification (por-Latn)": 61.29, "MassiveIntentClassification (amh-Ethi)": 36.77, "MassiveIntentClassification (cym-Latn)": 26.13, "MassiveIntentClassification (ind-Latn)": 59.9, "MassiveIntentClassification (ron-Latn)": 58.44, "MassiveIntentClassification (cmo-Hant)": 58.74, "MassiveIntentClassification (dan-Latn)": 57.75, "MassiveIntentClassification (swe-Latn)": 59.43, "MassiveIntentClassification (ara-Arab)": 45.15, "MassiveIntentClassification (ita-Latn)": 59.66, "MassiveIntentClassification (jpn-Jpan)": 60.9, "MassiveIntentClassification (swa-Latn)": 29.56, "MassiveIntentClassification (cmo-Hans)": 62.0, "MassiveIntentClassification (aze-Latn)": 47.43, "MassiveIntentClassification (hin-Deva)": 58.37, "MassiveIntentClassification (fra-Latn)": 60.24, "MassiveIntentClassification (hun-Latn)": 60.44, "MassiveIntentClassification (jav-Latn)": 32.37, "MassiveIntentClassification (slv-Latn)": 57.34, "MassiveIntentClassification (ell-Grek)": 58.7, "MassiveIntentClassification (hye-Armn)": 51.6, "MassiveIntentClassification (nob-Latn)": 55.52, "MassiveIntentClassification (rus-Cyrl)": 59.06, "MassiveIntentClassification (fas-Arab)": 61.03, "MassiveIntentClassification (mal-Mlym)": 42.44, "MassiveIntentClassification (tha-Thai)": 58.92, "MassiveIntentClassification (afr-Latn)": 45.87, "MassiveIntentClassification (tel-Telu)": 40.77, "MassiveIntentClassification (urd-Arab)": 52.79, "MassiveIntentClassification (tgl-Latn)": 33.67, "MassiveIntentClassification (nld-Latn)": 59.52, "MassiveIntentClassification (fin-Latn)": 57.56, "MassiveIntentClassification (lav-Latn)": 54.72, "MassiveIntentClassification (sqi-Latn)": 56.6, "MassiveIntentClassification (khm-Khmr)": 40.04, "MassiveIntentClassification (msa-Latn)": 54.81, "MassiveIntentClassification (heb-Hebr)": 52.55, "MassiveIntentClassification (mya-Mymr)": 52.03, "MassiveIntentClassification (kor-Kore)": 50.36, "MassiveIntentClassification (pl)": 59.43, "MassiveIntentClassification (fr)": 57.52, "MassiveScenarioClassification (khm-Khmr)": 46.95, "MassiveScenarioClassification (kan-Knda)": 45.72, "MassiveScenarioClassification (isl-Latn)": 37.55, "MassiveScenarioClassification (nob-Latn)": 64.25, "MassiveScenarioClassification (swe-Latn)": 67.14, "MassiveScenarioClassification (nld-Latn)": 65.53, "MassiveScenarioClassification (slv-Latn)": 64.01, "MassiveScenarioClassification (jpn-Jpan)": 66.49, "MassiveScenarioClassification (spa-Latn)": 65.07, "MassiveScenarioClassification (kor-Kore)": 55.71, "MassiveScenarioClassification (fas-Arab)": 65.89, "MassiveScenarioClassification (jav-Latn)": 38.62, "MassiveScenarioClassification (aze-Latn)": 52.09, "MassiveScenarioClassification (kat-Geor)": 50.66, "MassiveScenarioClassification (rus-Cyrl)": 65.25, "MassiveScenarioClassification (fra-Latn)": 66.09, "MassiveScenarioClassification (fin-Latn)": 63.74, "MassiveScenarioClassification (dan-Latn)": 66.87, "MassiveScenarioClassification (ben-Beng)": 41.19, "MassiveScenarioClassification (tur-Latn)": 66.53, "MassiveScenarioClassification 
(ind-Latn)": 66.17, "MassiveScenarioClassification (por-Latn)": 65.83, "MassiveScenarioClassification (cym-Latn)": 31.71, "MassiveScenarioClassification (pol-Latn)": 65.04, "MassiveScenarioClassification (sqi-Latn)": 64.34, "MassiveScenarioClassification (mal-Mlym)": 47.73, "MassiveScenarioClassification (tel-Telu)": 46.49, "MassiveScenarioClassification (en)": 71.54, "MassiveScenarioClassification (ell-Grek)": 66.14, "MassiveScenarioClassification (tha-Thai)": 67.05, "MassiveScenarioClassification (tgl-Latn)": 37.39, "MassiveScenarioClassification (msa-Latn)": 61.73, "MassiveScenarioClassification (ara-Arab)": 51.71, "MassiveScenarioClassification (heb-Hebr)": 59.22, "MassiveScenarioClassification (deu-Latn)": 57.4, "MassiveScenarioClassification (mya-Mymr)": 59.09, "MassiveScenarioClassification (ron-Latn)": 64.2, "MassiveScenarioClassification (hin-Deva)": 65.23, "MassiveScenarioClassification (hun-Latn)": 66.57, "MassiveScenarioClassification (afr-Latn)": 53.63, "MassiveScenarioClassification (tam-Taml)": 42.63, "MassiveScenarioClassification (hye-Armn)": 56.11, "MassiveScenarioClassification (vie-Latn)": 60.73, "MassiveScenarioClassification (lav-Latn)": 59.82, "MassiveScenarioClassification (mon-Cyrl)": 57.07, "MassiveScenarioClassification (urd-Arab)": 60.41, "MassiveScenarioClassification (cmo-Hans)": 67.45, "MassiveScenarioClassification (swa-Latn)": 34.86, "MassiveScenarioClassification (amh-Ethi)": 41.89, "MassiveScenarioClassification (ita-Latn)": 65.01, "MassiveScenarioClassification (cmo-Hant)": 65.72, "MassiveScenarioClassification (pl)": 65.04, "MassiveScenarioClassification (fr)": 64.52, "MultilingualSentiment (cmn-Hans)": 61.9, "NoRecClassification (nob-Latn)": 46.7, "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 42.52, "OnlineShopping (cmn-Hans)": 84.89, "PAC (pol-Latn)": 65.75, "PAC": 65.76, "PolEmo2.0-IN (pol-Latn)": 57.76, "PolEmo2.0-IN": 57.76, "PolEmo2.0-OUT (pol-Latn)": 28.66, "PolEmo2.0-OUT": 28.7, "RuReviewsClassification (rus-Cyrl)": 58.88, "RuSciBenchGRNTIClassification (rus-Cyrl)": 53.19, "RuSciBenchOECDClassification (rus-Cyrl)": 41.41, "TNews (cmn-Hans)": 39.19, "ToxicConversationsClassification": 66.07, "TweetSentimentExtractionClassification": 56.12, "Waimai (cmn-Hans)": 82.27 } ] }, "Clustering": { "v_measure": [ { "Model": "paraphrase-multilingual-MiniLM-L12-v2", "8TagsClustering": 23.24, "AlloProfClusteringP2P": 56.06, "AlloProfClusteringS2S": 42.16, "ArxivClusteringP2P": 38.33, "ArxivClusteringS2S": 31.55, "BiorxivClusteringP2P": 33.49, "BiorxivClusteringS2S": 29.44, "BlurbsClusteringP2P": 32.46, "BlurbsClusteringS2S": 14.33, "GeoreviewClusteringP2P (rus-Cyrl)": 53.35, "HALClusteringS2S": 23.21, "MLSUMClusteringP2P (rus-Cyrl)": 37.0, "MLSUMClusteringP2P": 39.97, "MLSUMClusteringS2S (rus-Cyrl)": 38.16, "MLSUMClusteringS2S": 36.55, "MasakhaNEWSClusteringP2P (amh-Ethi)": 40.36, "MasakhaNEWSClusteringP2P (eng)": 49.96, "MasakhaNEWSClusteringP2P (fra-Latn)": 40.85, "MasakhaNEWSClusteringP2P (hau-Latn)": 19.39, "MasakhaNEWSClusteringP2P (ibo-Latn)": 33.81, "MasakhaNEWSClusteringP2P (lin-Latn)": 51.98, "MasakhaNEWSClusteringP2P (lug-Latn)": 41.88, "MasakhaNEWSClusteringP2P (orm-Ethi)": 22.23, "MasakhaNEWSClusteringP2P (pcm-Latn)": 64.64, "MasakhaNEWSClusteringP2P (run-Latn)": 48.03, "MasakhaNEWSClusteringP2P (sna-Latn)": 44.62, "MasakhaNEWSClusteringP2P (som-Latn)": 27.54, "MasakhaNEWSClusteringP2P (swa-Latn)": 22.69, "MasakhaNEWSClusteringP2P (tir-Ethi)": 42.02, "MasakhaNEWSClusteringP2P (xho-Latn)": 27.68, 
"MasakhaNEWSClusteringP2P (yor-Latn)": 27.29, "MasakhaNEWSClusteringP2P (fra)": 36.58, "MasakhaNEWSClusteringS2S (amh-Ethi)": 42.28, "MasakhaNEWSClusteringS2S (eng)": 25.74, "MasakhaNEWSClusteringS2S (fra-Latn)": 36.5, "MasakhaNEWSClusteringS2S (hau-Latn)": 9.2, "MasakhaNEWSClusteringS2S (ibo-Latn)": 33.37, "MasakhaNEWSClusteringS2S (lin-Latn)": 47.76, "MasakhaNEWSClusteringS2S (lug-Latn)": 45.15, "MasakhaNEWSClusteringS2S (orm-Ethi)": 22.08, "MasakhaNEWSClusteringS2S (pcm-Latn)": 58.42, "MasakhaNEWSClusteringS2S (run-Latn)": 47.41, "MasakhaNEWSClusteringS2S (sna-Latn)": 43.0, "MasakhaNEWSClusteringS2S (som-Latn)": 26.22, "MasakhaNEWSClusteringS2S (swa-Latn)": 13.53, "MasakhaNEWSClusteringS2S (tir-Ethi)": 42.4, "MasakhaNEWSClusteringS2S (xho-Latn)": 21.03, "MasakhaNEWSClusteringS2S (yor-Latn)": 27.04, "MasakhaNEWSClusteringS2S (fra)": 33.9, "MedrxivClusteringP2P": 31.52, "MedrxivClusteringS2S": 30.87, "RedditClustering": 42.02, "RedditClusteringP2P": 50.73, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 48.22, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 41.68, "StackExchangeClustering": 49.6, "StackExchangeClusteringP2P": 31.69, "TenKGnadClusteringP2P": 36.13, "TenKGnadClusteringS2S": 22.26, "TwentyNewsgroupsClustering": 39.28 } ] }, "PairClassification": { "ap": [ { "Model": "paraphrase-multilingual-MiniLM-L12-v2", "CDSC-E (pol-Latn)": 72.22, "CDSC-E": 72.22, "OpusparcusPC (deu-Latn)": 96.63, "OpusparcusPC (en)": 98.59, "OpusparcusPC (fin-Latn)": 93.2, "OpusparcusPC (fra-Latn)": 92.01, "OpusparcusPC (rus-Cyrl)": 88.25, "OpusparcusPC (swe-Latn)": 93.99, "OpusparcusPC (fr)": 92.01, "PPC": 91.8, "PSC (pol-Latn)": 97.14, "PSC": 97.14, "PawsXPairClassification (deu-Latn)": 53.26, "PawsXPairClassification (en)": 55.94, "PawsXPairClassification (spa-Latn)": 54.61, "PawsXPairClassification (fra-Latn)": 56.94, "PawsXPairClassification (jpn-Hira)": 48.66, "PawsXPairClassification (kor-Hang)": 49.69, "PawsXPairClassification (cmn-Hans)": 54.3, "PawsXPairClassification (fr)": 56.94, "SICK-E-PL (pol-Latn)": 71.94, "SICK-E-PL": 71.94, "SprintDuplicateQuestions": 89.46, "TERRa (rus-Cyrl)": 58.56, "TwitterSemEval2015": 62.06, "TwitterURLCorpus": 83.83 } ] }, "Reranking": { "map": [ { "Model": "paraphrase-multilingual-MiniLM-L12-v2", "AlloprofReranking (fra-Latn)": 62.42, "AlloprofReranking": 49.01, "AskUbuntuDupQuestions": 60.49, "MMarcoReranking (cmn-Hans)": 16.14, "MindSmallReranking": 30.37, "RuBQReranking (rus-Cyrl)": 52.8, "SciDocsRR": 77.78, "StackOverflowDupQuestions": 45.85, "SyntecReranking (fra-Latn)": 72.5, "SyntecReranking": 75.03, "T2Reranking (cmn-Hans)": 65.28 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "paraphrase-multilingual-MiniLM-L12-v2", "AILACasedocs": 13.66, "AILAStatutes": 20.52, "ARCChallenge": 6.19, "AlloprofRetrieval (fra-Latn)": 26.63, "AlloprofRetrieval": 26.63, "AlphaNLI": 20.89, "ArguAna": 44.88, "ArguAna-PL (pol-Latn)": 37.86, "ArguAna-PL": 37.83, "BSARDRetrieval (fra-Latn)": 9.6, "BSARDRetrieval": 0.0, "CQADupstackRetrieval": 30.7, "ClimateFEVER": 18.49, "CmedqaRetrieval (cmn-Hans)": 10.78, "CovidRetrieval (cmn-Hans)": 30.11, "DBPedia": 22.63, "DBPedia-PL": 18.0, "DuRetrieval (cmn-Hans)": 34.72, "EcomRetrieval (cmn-Hans)": 13.32, "FEVER": 52.66, "FiQA-PL (pol-Latn)": 12.49, "FiQA-PL": 12.49, "FiQA2018": 20.33, "GerDaLIRSmall (deu-Latn)": 2.62, "HellaSwag": 16.98, "HotpotQA": 30.01, "HotpotQA-PL": 22.76, "LEMBNarrativeQARetrieval": 13.82, "LEMBNeedleRetrieval": 13.5, "LEMBPasskeyRetrieval": 8.25, "LEMBQMSumRetrieval": 11.02, "LEMBSummScreenFDRetrieval": 38.12, 
"LEMBWikimQARetrieval": 40.84, "LeCaRDv2 (zho-Hans)": 32.03, "LegalBenchConsumerContractsQA": 49.81, "LegalBenchCorporateLobbying": 88.51, "LegalQuAD (deu-Latn)": 13.31, "LegalSummarization": 54.97, "MMarcoRetrieval (cmn-Hans)": 46.62, "MSMARCO": 23.72, "MSMARCO-PL": 10.39, "MedicalRetrieval (cmn-Hans)": 15.46, "MintakaRetrieval (ara-Arab)": 12.61, "MintakaRetrieval (deu-Latn)": 21.77, "MintakaRetrieval (spa-Latn)": 21.59, "MintakaRetrieval (fra-Latn)": 21.53, "MintakaRetrieval (hin-Deva)": 16.76, "MintakaRetrieval (ita-Latn)": 22.23, "MintakaRetrieval (jpn-Hira)": 14.33, "MintakaRetrieval (por-Latn)": 22.52, "MintakaRetrieval (fr)": 21.53, "NFCorpus": 23.45, "NFCorpus-PL (pol-Latn)": 17.17, "NFCorpus-PL": 17.16, "NQ": 29.8, "NQ-PL": 12.56, "PIQA": 15.79, "Quail": 2.96, "Quora-PL": 77.18, "QuoraRetrieval": 86.55, "RARbCode": 8.48, "RARbMath": 30.02, "RiaNewsRetrieval (rus-Cyrl)": 44.82, "RuBQRetrieval (rus-Cyrl)": 29.7, "SCIDOCS": 0.03, "SCIDOCS-PL (pol-Latn)": 10.26, "SCIDOCS-PL": 10.26, "SIQA": 0.88, "SciFact": 48.37, "SciFact-PL (pol-Latn)": 40.24, "SciFact-PL": 40.24, "SpartQA": 4.94, "SyntecRetrieval (fra-Latn)": 65.54, "SyntecRetrieval": 65.54, "T2Retrieval (cmn-Hans)": 30.31, "TRECCOVID": 39.12, "TRECCOVID-PL (pol-Latn)": 34.23, "TRECCOVID-PL": 34.38, "TempReasonL1": 1.43, "TempReasonL2Fact": 6.21, "TempReasonL2Pure": 0.22, "TempReasonL3Fact": 6.77, "TempReasonL3Pure": 4.9, "Touche2020": 16.06, "VideoRetrieval (cmn-Hans)": 14.71, "WinoGrande": 46.52, "XPQARetrieval (ara-Arab_ara-Arab)": 22.97, "XPQARetrieval (eng-Latn_ara-Arab)": 17.17, "XPQARetrieval (ara-Arab_eng-Latn)": 25.5, "XPQARetrieval (deu-Latn_deu-Latn)": 42.62, "XPQARetrieval (eng-Latn_deu-Latn)": 26.52, "XPQARetrieval (deu-Latn_eng-Latn)": 48.73, "XPQARetrieval (spa-Latn_spa-Latn)": 38.24, "XPQARetrieval (eng-Latn_spa-Latn)": 26.09, "XPQARetrieval (spa-Latn_eng-Latn)": 41.51, "XPQARetrieval (fra-Latn_fra-Latn)": 42.51, "XPQARetrieval (eng-Latn_fra-Latn)": 26.09, "XPQARetrieval (fra-Latn_eng-Latn)": 43.08, "XPQARetrieval (hin-Deva_hin-Deva)": 52.09, "XPQARetrieval (eng-Latn_hin-Deva)": 24.08, "XPQARetrieval (hin-Deva_eng-Latn)": 49.11, "XPQARetrieval (ita-Latn_ita-Latn)": 51.63, "XPQARetrieval (eng-Latn_ita-Latn)": 29.34, "XPQARetrieval (ita-Latn_eng-Latn)": 46.53, "XPQARetrieval (jpn-Hira_jpn-Hira)": 51.57, "XPQARetrieval (eng-Latn_jpn-Hira)": 23.87, "XPQARetrieval (jpn-Hira_eng-Latn)": 44.93, "XPQARetrieval (kor-Hang_kor-Hang)": 21.34, "XPQARetrieval (eng-Latn_kor-Hang)": 21.51, "XPQARetrieval (kor-Hang_eng-Latn)": 22.59, "XPQARetrieval (pol-Latn_pol-Latn)": 28.45, "XPQARetrieval (eng-Latn_pol-Latn)": 17.08, "XPQARetrieval (pol-Latn_eng-Latn)": 26.57, "XPQARetrieval (por-Latn_por-Latn)": 32.33, "XPQARetrieval (eng-Latn_por-Latn)": 19.76, "XPQARetrieval (por-Latn_eng-Latn)": 34.2, "XPQARetrieval (tam-Taml_tam-Taml)": 6.36, "XPQARetrieval (eng-Latn_tam-Taml)": 5.36, "XPQARetrieval (tam-Taml_eng-Latn)": 9.03, "XPQARetrieval (cmn-Hans_cmn-Hans)": 44.16, "XPQARetrieval (eng-Latn_cmn-Hans)": 19.03, "XPQARetrieval (cmn-Hans_eng-Latn)": 40.08, "XPQARetrieval (fr)": 42.51 } ] }, "STS": { "spearman": [ { "Model": "paraphrase-multilingual-MiniLM-L12-v2", "AFQMC (cmn-Hans)": 14.3, "ATEC (cmn-Hans)": 18.42, "BIOSSES": 74.18, "BQ (cmn-Hans)": 38.53, "CDSC-R (pol-Latn)": 88.98, "CDSC-R": 88.98, "LCQMC (cmn-Hans)": 63.96, "PAWSX (cmn-Hans)": 10.13, "RUParaPhraserSTS (rus-Cyrl)": 61.87, "RuSTSBenchmarkSTS (rus-Cyrl)": 79.55, "SICK-R": 79.61, "SICK-R-PL (pol-Latn)": 68.77, "SICK-R-PL": 68.77, "SICKFr (fra-Latn)": 75.1, "SICKFr": 75.1, 
"STS12": 76.02, "STS13": 80.7, "STS14": 78.85, "STS15": 85.84, "STS16": 81.05, "STS17 (fra-Latn_eng-Latn)": 76.59, "STS17 (nld-Latn_eng-Latn)": 81.71, "STS17 (ita-Latn_eng-Latn)": 82.35, "STS17 (kor-Hang)": 77.03, "STS17 (ara-Arab)": 79.16, "STS17 (eng-Latn_ara-Arab)": 81.22, "STS17 (spa-Latn_eng-Latn)": 84.44, "STS17 (spa-Latn)": 85.56, "STS17 (eng-Latn_deu-Latn)": 84.22, "STS17 (en-en)": 86.87, "STS17 (eng-Latn_tur-Latn)": 76.74, "STS17 (ar-ar)": 79.16, "STS17 (en-ar)": 81.22, "STS17 (en-de)": 84.22, "STS17 (en-tr)": 76.74, "STS17 (es-en)": 84.44, "STS17 (es-es)": 85.56, "STS17 (fr-en)": 76.59, "STS17 (it-en)": 82.35, "STS17 (ko-ko)": 77.03, "STS17 (nl-en)": 81.71, "STS22 (ara-Arab)": 46.2, "STS22 (spa-Latn_eng-Latn)": 67.33, "STS22 (cmn-Hans)": 58.75, "STS22 (fra-Latn)": 70.55, "STS22 (en)": 62.07, "STS22 (deu-Latn)": 44.64, "STS22 (pol-Latn)": 33.74, "STS22 (rus-Cyrl)": 57.08, "STS22 (pol-Latn_eng-Latn)": 69.02, "STS22 (deu-Latn_eng-Latn)": 52.65, "STS22 (cmn-Hans_eng-Latn)": 65.71, "STS22 (tur-Latn)": 53.39, "STS22 (spa-Latn)": 56.56, "STS22 (deu-Latn_pol-Latn)": 44.22, "STS22 (spa-Latn_ita-Latn)": 47.67, "STS22 (deu-Latn_fra-Latn)": 51.73, "STS22 (fra-Latn_pol-Latn)": 50.71, "STS22 (ita-Latn)": 55.22, "STS22 (pl)": 33.73, "STS22 (fr)": 70.55, "STSB (cmn-Hans)": 78.91, "STSBenchmark": 84.42, "STSBenchmarkMultilingualSTS (spa-Latn)": 81.1, "STSBenchmarkMultilingualSTS (fra-Latn)": 79.9, "STSBenchmarkMultilingualSTS (cmn-Hans)": 80.47, "STSBenchmarkMultilingualSTS (rus-Cyrl)": 79.32, "STSBenchmarkMultilingualSTS (ita-Latn)": 80.39, "STSBenchmarkMultilingualSTS (pol-Latn)": 78.29, "STSBenchmarkMultilingualSTS (por-Latn)": 80.16, "STSBenchmarkMultilingualSTS (deu-Latn)": 78.87, "STSBenchmarkMultilingualSTS (nld-Latn)": 79.54, "STSBenchmarkMultilingualSTS (en)": 84.42, "STSBenchmarkMultilingualSTS (fr)": 79.9 } ] }, "Summarization": { "spearman": [ { "Model": "paraphrase-multilingual-MiniLM-L12-v2", "SummEval": 30.67, "SummEvalFr (fra-Latn)": 29.2, "SummEvalFr": 29.2 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "paraphrase-multilingual-MiniLM-L12-v2" } ] } }, "nb-bert-base": { "BitextMining": { "f1": [ { "Model": "nb-bert-base", "BornholmBitextMining": 9.88 } ] }, "Classification": { "accuracy": [ { "Model": "nb-bert-base", "AngryTweetsClassification": 52.14, "DKHateClassification": 61.73, "DanishPoliticalCommentsClassification": 34.84, "LccSentimentClassification": 51.4, "MassiveIntentClassification (da)": 56.69, "MassiveIntentClassification (nb)": 60.67, "MassiveIntentClassification (sv)": 53.89, "MassiveScenarioClassification (da)": 61.93, "MassiveScenarioClassification (nb)": 67.31, "MassiveScenarioClassification (sv)": 55.37, "NoRecClassification": 51.32, "NordicLangClassification": 84.69, "NorwegianParliament": 57.41, "ScalaDaClassification": 57.99, "ScalaNbClassification": 62.25 } ] }, "Clustering": { "v_measure": [ { "Model": "nb-bert-base" } ] }, "PairClassification": { "ap": [ { "Model": "nb-bert-base" } ] }, "Reranking": { "map": [ { "Model": "nb-bert-base" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "nb-bert-base" } ] }, "STS": { "spearman": [ { "Model": "nb-bert-base" } ] }, "Summarization": { "spearman": [ { "Model": "nb-bert-base" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "nb-bert-base" } ] } }, "xlm-roberta-large": { "BitextMining": { "f1": [ { "Model": "xlm-roberta-large" } ] }, "Classification": { "accuracy": [ { "Model": "xlm-roberta-large", "AmazonReviewsClassification (fr)": 26.62, "MTOPDomainClassification (fr)": 36.77, 
"MTOPIntentClassification (fr)": 15.37, "MasakhaNEWSClassification (fra)": 65.76, "MassiveIntentClassification (fr)": 15.82, "MassiveScenarioClassification (fr)": 23.92 } ] }, "Clustering": { "v_measure": [ { "Model": "xlm-roberta-large", "AlloProfClusteringP2P": 56.54, "AlloProfClusteringS2S": 21.18, "BlurbsClusteringP2P": 29.84, "BlurbsClusteringS2S": 7.29, "HALClusteringS2S": 5.94, "MLSUMClusteringP2P": 42.67, "MLSUMClusteringS2S": 18.5, "MasakhaNEWSClusteringP2P (fra)": 34.02, "MasakhaNEWSClusteringS2S (fra)": 21.52, "TenKGnadClusteringP2P": 32.46, "TenKGnadClusteringS2S": 6.16 } ] }, "PairClassification": { "ap": [ { "Model": "xlm-roberta-large", "OpusparcusPC (fr)": 83.73, "PawsXPairClassification (fr)": 53.38 } ] }, "Reranking": { "map": [ { "Model": "xlm-roberta-large", "AlloprofReranking": 28.62, "SyntecReranking": 49.4 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "xlm-roberta-large", "AlloprofRetrieval": 0.52, "BSARDRetrieval": 0.0, "MintakaRetrieval (fr)": 0.9, "SyntecRetrieval": 6.6, "XPQARetrieval (fr)": 12.7 } ] }, "STS": { "spearman": [ { "Model": "xlm-roberta-large", "SICKFr": 50.01, "STS22 (fr)": 55.49, "STSBenchmarkMultilingualSTS (fr)": 42.32 } ] }, "Summarization": { "spearman": [ { "Model": "xlm-roberta-large", "SummEvalFr": 28.89 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "xlm-roberta-large" } ] } }, "text-similarity-curie-001": { "BitextMining": { "f1": [ { "Model": "text-similarity-curie-001" } ] }, "Classification": { "accuracy": [ { "Model": "text-similarity-curie-001" } ] }, "Clustering": { "v_measure": [ { "Model": "text-similarity-curie-001", "RedditClustering": 40.79, "StackExchangeClustering": 55.14, "TwentyNewsgroupsClustering": 37.64 } ] }, "PairClassification": { "ap": [ { "Model": "text-similarity-curie-001", "SprintDuplicateQuestions": 79.85, "TwitterSemEval2015": 69.45, "TwitterURLCorpus": 84.06 } ] }, "Reranking": { "map": [ { "Model": "text-similarity-curie-001", "AskUbuntuDupQuestions": 55.09, "SciDocsRR": 70.93, "StackOverflowDupQuestions": 42.42 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "text-similarity-curie-001", "FiQA2018": 5.14, "NFCorpus": 19.96, "QuoraRetrieval": 83.11, "SciFact": 46.68, "TRECCOVID": 7.61 } ] }, "STS": { "spearman": [ { "Model": "text-similarity-curie-001", "BIOSSES": 77.46, "SICK-R": 77.26, "STSBenchmark": 83.02 } ] }, "Summarization": { "spearman": [ { "Model": "text-similarity-curie-001" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "text-similarity-curie-001" } ] } }, "gbert-base": { "BitextMining": { "f1": [ { "Model": "gbert-base" } ] }, "Classification": { "accuracy": [ { "Model": "gbert-base" } ] }, "Clustering": { "v_measure": [ { "Model": "gbert-base", "BlurbsClusteringP2P": 35.36, "BlurbsClusteringS2S": 11.27, "TenKGnadClusteringP2P": 37.16, "TenKGnadClusteringS2S": 24.23 } ] }, "PairClassification": { "ap": [ { "Model": "gbert-base" } ] }, "Reranking": { "map": [ { "Model": "gbert-base" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "gbert-base" } ] }, "STS": { "spearman": [ { "Model": "gbert-base" } ] }, "Summarization": { "spearman": [ { "Model": "gbert-base" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "gbert-base" } ] } }, "nomic-embed-text-v1.5-64": { "BitextMining": { "f1": [ { "Model": "nomic-embed-text-v1.5-64" } ] }, "Classification": { "accuracy": [ { "Model": "nomic-embed-text-v1.5-64", "AmazonCounterfactualClassification (en)": 66.85, "AmazonPolarityClassification": 85.92, "AmazonReviewsClassification (en)": 41.02, "Banking77Classification": 80.63, 
"EmotionClassification": 40.55, "ImdbClassification": 76.6, "MTOPDomainClassification (en)": 86.31, "MTOPIntentClassification (en)": 62.77, "MassiveIntentClassification (en)": 64.95, "MassiveScenarioClassification (en)": 70.38, "ToxicConversationsClassification": 66.53, "TweetSentimentExtractionClassification": 55.23 } ] }, "Clustering": { "v_measure": [ { "Model": "nomic-embed-text-v1.5-64", "ArxivClusteringP2P": 41.8, "ArxivClusteringS2S": 32.41, "BiorxivClusteringP2P": 34.81, "BiorxivClusteringS2S": 28.59, "MedrxivClusteringP2P": 32.73, "MedrxivClusteringS2S": 29.91, "RedditClustering": 50.31, "RedditClusteringP2P": 56.57, "StackExchangeClustering": 57.99, "StackExchangeClusteringP2P": 33.64, "TwentyNewsgroupsClustering": 44.61 } ] }, "PairClassification": { "ap": [ { "Model": "nomic-embed-text-v1.5-64", "SprintDuplicateQuestions": 90.06, "TwitterSemEval2015": 71.68, "TwitterURLCorpus": 85.03 } ] }, "Reranking": { "map": [ { "Model": "nomic-embed-text-v1.5-64", "AskUbuntuDupQuestions": 60.79, "MindSmallReranking": 29.7, "SciDocsRR": 75.79, "StackOverflowDupQuestions": 47.42 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "nomic-embed-text-v1.5-64", "ArguAna": 37.16, "CQADupstackRetrieval": 28.72, "ClimateFEVER": 31.48, "DBPedia": 28.19, "FEVER": 70.24, "FiQA2018": 25.78, "HotpotQA": 43.07, "MSMARCO": 35.95, "NFCorpus": 26.03, "NQ": 45.54, "QuoraRetrieval": 85.83, "SCIDOCS": 12.09, "SciFact": 52.71, "TRECCOVID": 67.83, "Touche2020": 23.13 } ] }, "STS": { "spearman": [ { "Model": "nomic-embed-text-v1.5-64", "BIOSSES": 77.18, "SICK-R": 78.76, "STS12": 77.3, "STS13": 84.18, "STS14": 79.37, "STS15": 84.69, "STS16": 83.36, "STS17 (en-en)": 85.73, "STS22 (en)": 63.83, "STSBenchmark": 83.46 } ] }, "Summarization": { "spearman": [ { "Model": "nomic-embed-text-v1.5-64", "SummEval": 28.41 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "nomic-embed-text-v1.5-64" } ] } }, "bert-base-swedish-cased": { "BitextMining": { "f1": [ { "Model": "bert-base-swedish-cased", "BornholmBitextMining": 6.6 } ] }, "Classification": { "accuracy": [ { "Model": "bert-base-swedish-cased", "AngryTweetsClassification": 44.58, "DKHateClassification": 55.53, "DanishPoliticalCommentsClassification": 28.97, "LccSentimentClassification": 41.2, "MassiveIntentClassification (da)": 37.98, "MassiveIntentClassification (nb)": 35.75, "MassiveIntentClassification (sv)": 52.75, "MassiveScenarioClassification (da)": 40.44, "MassiveScenarioClassification (nb)": 35.76, "MassiveScenarioClassification (sv)": 56.09, "NoRecClassification": 43.91, "NordicLangClassification": 62.45, "NorwegianParliament": 57.56, "ScalaDaClassification": 53.53, "ScalaNbClassification": 53.63 } ] }, "Clustering": { "v_measure": [ { "Model": "bert-base-swedish-cased" } ] }, "PairClassification": { "ap": [ { "Model": "bert-base-swedish-cased" } ] }, "Reranking": { "map": [ { "Model": "bert-base-swedish-cased" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "bert-base-swedish-cased" } ] }, "STS": { "spearman": [ { "Model": "bert-base-swedish-cased" } ] }, "Summarization": { "spearman": [ { "Model": "bert-base-swedish-cased" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "bert-base-swedish-cased" } ] } }, "e5-small": { "BitextMining": { "f1": [ { "Model": "e5-small", "BornholmBitextMining": 40.27 } ] }, "Classification": { "accuracy": [ { "Model": "e5-small", "AngryTweetsClassification": 43.6, "DKHateClassification": 57.57, "DanishPoliticalCommentsClassification": 28.37, "LccSentimentClassification": 40.27, "MassiveIntentClassification (da)": 
41.89, "MassiveIntentClassification (nb)": 40.25, "MassiveIntentClassification (sv)": 40.07, "MassiveScenarioClassification (da)": 49.93, "MassiveScenarioClassification (nb)": 48.58, "MassiveScenarioClassification (sv)": 47.06, "NoRecClassification": 41.84, "NordicLangClassification": 53.47, "NorwegianParliament": 56.57, "ScalaDaClassification": 50.15, "ScalaNbClassification": 50.03 } ] }, "Clustering": { "v_measure": [ { "Model": "e5-small", "BiorxivClusteringP2P": 36.1, "BiorxivClusteringS2S": 31.51, "MedrxivClusteringP2P": 31.31, "MedrxivClusteringS2S": 28.32, "RedditClustering": 43.27, "RedditClusteringP2P": 57.22, "StackExchangeClustering": 59.6, "StackExchangeClusteringP2P": 30.82, "TwentyNewsgroupsClustering": 37.65 } ] }, "PairClassification": { "ap": [ { "Model": "e5-small" } ] }, "Reranking": { "map": [ { "Model": "e5-small" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "e5-small" } ] }, "STS": { "spearman": [ { "Model": "e5-small" } ] }, "Summarization": { "spearman": [ { "Model": "e5-small" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "e5-small" } ] } }, "universal-sentence-encoder-multilingual-large-3": { "BitextMining": { "f1": [ { "Model": "universal-sentence-encoder-multilingual-large-3" } ] }, "Classification": { "accuracy": [ { "Model": "universal-sentence-encoder-multilingual-large-3", "AmazonReviewsClassification (fr)": 35.09, "MTOPDomainClassification (fr)": 88.19, "MTOPIntentClassification (fr)": 63.64, "MasakhaNEWSClassification (fra)": 72.04, "MassiveIntentClassification (fr)": 65.8, "MassiveScenarioClassification (fr)": 73.47 } ] }, "Clustering": { "v_measure": [ { "Model": "universal-sentence-encoder-multilingual-large-3", "AlloProfClusteringP2P": 54.21, "AlloProfClusteringS2S": 37.95, "HALClusteringS2S": 18.94, "MLSUMClusteringP2P": 41.02, "MLSUMClusteringS2S": 37.97, "MasakhaNEWSClusteringP2P (fra)": 24.09, "MasakhaNEWSClusteringS2S (fra)": 40.24 } ] }, "PairClassification": { "ap": [ { "Model": "universal-sentence-encoder-multilingual-large-3", "OpusparcusPC (fr)": 93.38, "PawsXPairClassification (fr)": 53.62 } ] }, "Reranking": { "map": [ { "Model": "universal-sentence-encoder-multilingual-large-3", "AlloprofReranking": 55.39, "SyntecReranking": 77.13 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "universal-sentence-encoder-multilingual-large-3", "AlloprofRetrieval": 33.78, "BSARDRetrieval": 0.0, "MintakaRetrieval (fr)": 26.21, "SyntecRetrieval": 63.69, "XPQARetrieval (fr)": 65.21 } ] }, "STS": { "spearman": [ { "Model": "universal-sentence-encoder-multilingual-large-3", "SICKFr": 74.39, "STS22 (fr)": 71.11, "STSBenchmarkMultilingualSTS (fr)": 78.16 } ] }, "Summarization": { "spearman": [ { "Model": "universal-sentence-encoder-multilingual-large-3", "SummEvalFr": 28.56 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "universal-sentence-encoder-multilingual-large-3" } ] } }, "multilingual-e5-small": { "BitextMining": { "f1": [ { "Model": "multilingual-e5-small", "BornholmBitextMining (dan-Latn)": 37.15, "BornholmBitextMining": 43.89, "Tatoeba (swh-Latn_eng-Latn)": 65.43, "Tatoeba (jpn-Jpan_eng-Latn)": 77.43, "Tatoeba (tuk-Latn_eng-Latn)": 16.99, "Tatoeba (lat-Latn_eng-Latn)": 37.76, "Tatoeba (mal-Mlym_eng-Latn)": 94.78, "Tatoeba (ast-Latn_eng-Latn)": 62.81, "Tatoeba (est-Latn_eng-Latn)": 56.47, "Tatoeba (cym-Latn_eng-Latn)": 62.3, "Tatoeba (pol-Latn_eng-Latn)": 88.85, "Tatoeba (ukr-Cyrl_eng-Latn)": 82.98, "Tatoeba (ido-Latn_eng-Latn)": 70.07, "Tatoeba (zsm-Latn_eng-Latn)": 91.37, "Tatoeba (bul-Cyrl_eng-Latn)": 85.47, "Tatoeba 
(dsb-Latn_eng-Latn)": 29.87, "Tatoeba (tha-Thai_eng-Latn)": 90.88, "Tatoeba (arz-Arab_eng-Latn)": 53.35, "Tatoeba (cbk-Latn_eng-Latn)": 55.36, "Tatoeba (pms-Latn_eng-Latn)": 35.47, "Tatoeba (ber-Tfng_eng-Latn)": 18.22, "Tatoeba (slk-Latn_eng-Latn)": 79.86, "Tatoeba (ang-Latn_eng-Latn)": 30.3, "Tatoeba (ind-Latn_eng-Latn)": 88.28, "Tatoeba (cha-Latn_eng-Latn)": 24.88, "Tatoeba (slv-Latn_eng-Latn)": 73.93, "Tatoeba (kab-Latn_eng-Latn)": 18.06, "Tatoeba (ina-Latn_eng-Latn)": 86.39, "Tatoeba (lfn-Latn_eng-Latn)": 51.46, "Tatoeba (hye-Armn_eng-Latn)": 83.81, "Tatoeba (war-Latn_eng-Latn)": 39.14, "Tatoeba (dtp-Latn_eng-Latn)": 6.42, "Tatoeba (nds-Latn_eng-Latn)": 52.46, "Tatoeba (urd-Arab_eng-Latn)": 85.07, "Tatoeba (rus-Cyrl_eng-Latn)": 89.77, "Tatoeba (fao-Latn_eng-Latn)": 56.57, "Tatoeba (cat-Latn_eng-Latn)": 79.3, "Tatoeba (gla-Latn_eng-Latn)": 35.96, "Tatoeba (kur-Latn_eng-Latn)": 39.99, "Tatoeba (cor-Latn_eng-Latn)": 5.24, "Tatoeba (nov-Latn_eng-Latn)": 64.2, "Tatoeba (max-Deva_eng-Latn)": 48.29, "Tatoeba (nno-Latn_eng-Latn)": 70.29, "Tatoeba (kor-Hang_eng-Latn)": 73.74, "Tatoeba (vie-Latn_eng-Latn)": 89.03, "Tatoeba (tur-Latn_eng-Latn)": 88.42, "Tatoeba (spa-Latn_eng-Latn)": 93.01, "Tatoeba (gsw-Latn_eng-Latn)": 40.13, "Tatoeba (yid-Hebr_eng-Latn)": 65.9, "Tatoeba (orv-Cyrl_eng-Latn)": 14.89, "Tatoeba (wuu-Hans_eng-Latn)": 67.3, "Tatoeba (heb-Hebr_eng-Latn)": 73.68, "Tatoeba (arq-Arab_eng-Latn)": 23.62, "Tatoeba (nld-Latn_eng-Latn)": 91.87, "Tatoeba (kaz-Cyrl_eng-Latn)": 70.57, "Tatoeba (mon-Cyrl_eng-Latn)": 77.7, "Tatoeba (fin-Latn_eng-Latn)": 70.23, "Tatoeba (hrv-Latn_eng-Latn)": 84.42, "Tatoeba (fra-Latn_eng-Latn)": 90.51, "Tatoeba (khm-Khmr_eng-Latn)": 44.34, "Tatoeba (amh-Ethi_eng-Latn)": 74.11, "Tatoeba (eus-Latn_eng-Latn)": 50.9, "Tatoeba (lvs-Latn_eng-Latn)": 61.84, "Tatoeba (pes-Arab_eng-Latn)": 85.51, "Tatoeba (tzl-Latn_eng-Latn)": 34.83, "Tatoeba (oci-Latn_eng-Latn)": 38.27, "Tatoeba (ell-Grek_eng-Latn)": 86.81, "Tatoeba (tgl-Latn_eng-Latn)": 77.54, "Tatoeba (uig-Arab_eng-Latn)": 60.59, "Tatoeba (ben-Beng_eng-Latn)": 81.4, "Tatoeba (uzb-Latn_eng-Latn)": 59.11, "Tatoeba (epo-Latn_eng-Latn)": 88.96, "Tatoeba (sqi-Latn_eng-Latn)": 86.21, "Tatoeba (kzj-Latn_eng-Latn)": 6.56, "Tatoeba (mkd-Cyrl_eng-Latn)": 63.74, "Tatoeba (bre-Latn_eng-Latn)": 7.09, "Tatoeba (dan-Latn_eng-Latn)": 86.38, "Tatoeba (mhr-Cyrl_eng-Latn)": 5.58, "Tatoeba (csb-Latn_eng-Latn)": 26.23, "Tatoeba (xho-Latn_eng-Latn)": 63.2, "Tatoeba (swe-Latn_eng-Latn)": 87.46, "Tatoeba (tat-Cyrl_eng-Latn)": 66.8, "Tatoeba (srp-Cyrl_eng-Latn)": 83.06, "Tatoeba (cmn-Hans_eng-Latn)": 89.85, "Tatoeba (ces-Latn_eng-Latn)": 80.99, "Tatoeba (bel-Cyrl_eng-Latn)": 80.89, "Tatoeba (yue-Hant_eng-Latn)": 69.33, "Tatoeba (lit-Latn_eng-Latn)": 59.95, "Tatoeba (tel-Telu_eng-Latn)": 86.82, "Tatoeba (nob-Latn_eng-Latn)": 90.18, "Tatoeba (mar-Deva_eng-Latn)": 85.94, "Tatoeba (ara-Arab_eng-Latn)": 76.09, "Tatoeba (swg-Latn_eng-Latn)": 44.0, "Tatoeba (bos-Latn_eng-Latn)": 81.15, "Tatoeba (pam-Latn_eng-Latn)": 5.76, "Tatoeba (fry-Latn_eng-Latn)": 49.05, "Tatoeba (hun-Latn_eng-Latn)": 74.44, "Tatoeba (ron-Latn_eng-Latn)": 85.68, "Tatoeba (afr-Latn_eng-Latn)": 85.17, "Tatoeba (isl-Latn_eng-Latn)": 62.32, "Tatoeba (aze-Latn_eng-Latn)": 80.79, "Tatoeba (hsb-Latn_eng-Latn)": 36.49, "Tatoeba (tam-Taml_eng-Latn)": 82.82, "Tatoeba (ceb-Latn_eng-Latn)": 42.35, "Tatoeba (jav-Latn_eng-Latn)": 53.39, "Tatoeba (glg-Latn_eng-Latn)": 79.65, "Tatoeba (por-Latn_eng-Latn)": 89.63, "Tatoeba (awa-Deva_eng-Latn)": 74.55, "Tatoeba (hin-Deva_eng-Latn)": 92.36, "Tatoeba 
(ita-Latn_eng-Latn)": 88.54, "Tatoeba (deu-Latn_eng-Latn)": 97.22, "Tatoeba (gle-Latn_eng-Latn)": 56.32, "Tatoeba (kat-Geor_eng-Latn)": 77.6, "Tatoeba (ile-Latn_eng-Latn)": 70.31 } ] }, "Classification": { "accuracy": [ { "Model": "multilingual-e5-small", "AllegroReviews (pol-Latn)": 37.33, "AllegroReviews": 37.42, "AmazonCounterfactualClassification (en-ext)": 73.07, "AmazonCounterfactualClassification (en)": 71.87, "AmazonCounterfactualClassification (deu-Latn)": 71.72, "AmazonCounterfactualClassification (jpn-Jpan)": 61.46, "AmazonPolarityClassification": 88.61, "AmazonReviewsClassification (en)": 45.75, "AmazonReviewsClassification (deu-Latn)": 41.07, "AmazonReviewsClassification (spa-Latn)": 41.37, "AmazonReviewsClassification (fra-Latn)": 39.47, "AmazonReviewsClassification (jpn-Jpan)": 38.55, "AmazonReviewsClassification (cmn-Hans)": 38.31, "AmazonReviewsClassification (fr)": 39.68, "AngryTweetsClassification (dan-Latn)": 56.27, "AngryTweetsClassification": 53.57, "Banking77Classification": 70.44, "CBD (pol-Latn)": 63.33, "CBD": 63.25, "DKHateClassification": 60.73, "DanishPoliticalCommentsClassification (dan-Latn)": 34.82, "DanishPoliticalCommentsClassification": 34.38, "EmotionClassification": 42.86, "GeoreviewClassification (rus-Cyrl)": 44.66, "HeadlineClassification (rus-Cyrl)": 73.94, "IFlyTek (cmn-Hans)": 40.74, "IFlyTek": 47.35, "ImdbClassification": 79.57, "InappropriatenessClassification (rus-Cyrl)": 59.16, "JDReview (cmn-Hans)": 78.37, "JDReview": 79.34, "KinopoiskClassification (rus-Cyrl)": 49.96, "LccSentimentClassification (dan-Latn)": 58.6, "LccSentimentClassification": 57.87, "MTOPDomainClassification (en)": 88.99, "MTOPDomainClassification (deu-Latn)": 86.15, "MTOPDomainClassification (spa-Latn)": 85.53, "MTOPDomainClassification (fra-Latn)": 81.5, "MTOPDomainClassification (hin-Deva)": 84.07, "MTOPDomainClassification (tha-Thai)": 83.16, "MTOPDomainClassification (fr)": 81.2, "MTOPIntentClassification (en)": 56.69, "MTOPIntentClassification (deu-Latn)": 55.88, "MTOPIntentClassification (spa-Latn)": 53.15, "MTOPIntentClassification (fra-Latn)": 44.35, "MTOPIntentClassification (hin-Deva)": 52.26, "MTOPIntentClassification (tha-Thai)": 54.61, "MTOPIntentClassification (fr)": 46.01, "MasakhaNEWSClassification (amh-Ethi)": 84.28, "MasakhaNEWSClassification (eng)": 75.61, "MasakhaNEWSClassification (fra-Latn)": 74.67, "MasakhaNEWSClassification (hau-Latn)": 73.08, "MasakhaNEWSClassification (ibo-Latn)": 63.9, "MasakhaNEWSClassification (lin-Latn)": 73.37, "MasakhaNEWSClassification (lug-Latn)": 67.89, "MasakhaNEWSClassification (orm-Ethi)": 68.77, "MasakhaNEWSClassification (pcm-Latn)": 90.79, "MasakhaNEWSClassification (run-Latn)": 75.4, "MasakhaNEWSClassification (sna-Latn)": 82.76, "MasakhaNEWSClassification (som-Latn)": 59.8, "MasakhaNEWSClassification (swa-Latn)": 69.85, "MasakhaNEWSClassification (tir-Ethi)": 68.01, "MasakhaNEWSClassification (xho-Latn)": 72.22, "MasakhaNEWSClassification (yor-Latn)": 73.84, "MasakhaNEWSClassification (fra)": 77.65, "MassiveIntentClassification (ben-Beng)": 50.68, "MassiveIntentClassification (tur-Latn)": 56.88, "MassiveIntentClassification (ind-Latn)": 56.2, "MassiveIntentClassification (khm-Khmr)": 33.45, "MassiveIntentClassification (en)": 63.87, "MassiveIntentClassification (mal-Mlym)": 52.81, "MassiveIntentClassification (pol-Latn)": 57.33, "MassiveIntentClassification (lav-Latn)": 44.93, "MassiveIntentClassification (isl-Latn)": 41.53, "MassiveIntentClassification (sqi-Latn)": 48.68, "MassiveIntentClassification (amh-Ethi)": 
43.52, "MassiveIntentClassification (cmo-Hans)": 62.04, "MassiveIntentClassification (nld-Latn)": 59.27, "MassiveIntentClassification (deu-Latn)": 55.52, "MassiveIntentClassification (nob-Latn)": 55.36, "MassiveIntentClassification (cmo-Hant)": 53.75, "MassiveIntentClassification (urd-Arab)": 50.51, "MassiveIntentClassification (slv-Latn)": 47.71, "MassiveIntentClassification (hun-Latn)": 53.21, "MassiveIntentClassification (jpn-Jpan)": 61.58, "MassiveIntentClassification (swa-Latn)": 44.84, "MassiveIntentClassification (fra-Latn)": 57.9, "MassiveIntentClassification (spa-Latn)": 59.19, "MassiveIntentClassification (mon-Cyrl)": 47.38, "MassiveIntentClassification (dan-Latn)": 56.12, "MassiveIntentClassification (msa-Latn)": 50.8, "MassiveIntentClassification (aze-Latn)": 49.32, "MassiveIntentClassification (fas-Arab)": 57.73, "MassiveIntentClassification (kan-Knda)": 47.85, "MassiveIntentClassification (kor-Kore)": 57.12, "MassiveIntentClassification (tha-Thai)": 56.26, "MassiveIntentClassification (heb-Hebr)": 51.11, "MassiveIntentClassification (hin-Deva)": 55.69, "MassiveIntentClassification (ara-Arab)": 47.78, "MassiveIntentClassification (por-Latn)": 60.12, "MassiveIntentClassification (vie-Latn)": 56.19, "MassiveIntentClassification (hye-Armn)": 47.89, "MassiveIntentClassification (ita-Latn)": 58.8, "MassiveIntentClassification (ell-Grek)": 54.14, "MassiveIntentClassification (cym-Latn)": 36.62, "MassiveIntentClassification (tel-Telu)": 48.85, "MassiveIntentClassification (kat-Geor)": 39.52, "MassiveIntentClassification (swe-Latn)": 58.2, "MassiveIntentClassification (tam-Taml)": 47.65, "MassiveIntentClassification (fin-Latn)": 55.14, "MassiveIntentClassification (tgl-Latn)": 48.7, "MassiveIntentClassification (ron-Latn)": 52.82, "MassiveIntentClassification (jav-Latn)": 42.96, "MassiveIntentClassification (rus-Cyrl)": 58.43, "MassiveIntentClassification (afr-Latn)": 48.74, "MassiveIntentClassification (mya-Mymr)": 45.64, "MassiveIntentClassification (da)": 54.63, "MassiveIntentClassification (nb)": 53.96, "MassiveIntentClassification (sv)": 56.6, "MassiveIntentClassification (pl)": 57.4, "MassiveScenarioClassification (nld-Latn)": 67.01, "MassiveScenarioClassification (tur-Latn)": 62.14, "MassiveScenarioClassification (cym-Latn)": 44.63, "MassiveScenarioClassification (jav-Latn)": 51.39, "MassiveScenarioClassification (hin-Deva)": 62.22, "MassiveScenarioClassification (fra-Latn)": 63.9, "MassiveScenarioClassification (cmo-Hans)": 68.96, "MassiveScenarioClassification (kan-Knda)": 52.73, "MassiveScenarioClassification (isl-Latn)": 49.66, "MassiveScenarioClassification (jpn-Jpan)": 67.75, "MassiveScenarioClassification (mal-Mlym)": 60.31, "MassiveScenarioClassification (pol-Latn)": 64.27, "MassiveScenarioClassification (mya-Mymr)": 51.07, "MassiveScenarioClassification (slv-Latn)": 54.05, "MassiveScenarioClassification (rus-Cyrl)": 63.89, "MassiveScenarioClassification (urd-Arab)": 55.91, "MassiveScenarioClassification (fas-Arab)": 63.32, "MassiveScenarioClassification (fin-Latn)": 61.89, "MassiveScenarioClassification (kat-Geor)": 44.96, "MassiveScenarioClassification (sqi-Latn)": 56.15, "MassiveScenarioClassification (en)": 69.28, "MassiveScenarioClassification (hun-Latn)": 61.93, "MassiveScenarioClassification (aze-Latn)": 53.27, "MassiveScenarioClassification (heb-Hebr)": 59.22, "MassiveScenarioClassification (kor-Kore)": 65.7, "MassiveScenarioClassification (nob-Latn)": 61.96, "MassiveScenarioClassification (dan-Latn)": 64.03, "MassiveScenarioClassification (cmo-Hant)": 61.15, 
"MassiveScenarioClassification (ron-Latn)": 60.0, "MassiveScenarioClassification (amh-Ethi)": 50.53, "MassiveScenarioClassification (spa-Latn)": 64.43, "MassiveScenarioClassification (afr-Latn)": 58.0, "MassiveScenarioClassification (lav-Latn)": 51.0, "MassiveScenarioClassification (deu-Latn)": 65.88, "MassiveScenarioClassification (ita-Latn)": 64.03, "MassiveScenarioClassification (tha-Thai)": 65.72, "MassiveScenarioClassification (msa-Latn)": 59.18, "MassiveScenarioClassification (tam-Taml)": 52.74, "MassiveScenarioClassification (ara-Arab)": 54.56, "MassiveScenarioClassification (tgl-Latn)": 55.3, "MassiveScenarioClassification (por-Latn)": 62.75, "MassiveScenarioClassification (swe-Latn)": 67.33, "MassiveScenarioClassification (tel-Telu)": 54.86, "MassiveScenarioClassification (khm-Khmr)": 39.01, "MassiveScenarioClassification (swa-Latn)": 52.42, "MassiveScenarioClassification (vie-Latn)": 62.67, "MassiveScenarioClassification (ind-Latn)": 62.0, "MassiveScenarioClassification (hye-Armn)": 52.93, "MassiveScenarioClassification (ben-Beng)": 57.38, "MassiveScenarioClassification (mon-Cyrl)": 52.41, "MassiveScenarioClassification (ell-Grek)": 62.29, "MassiveScenarioClassification (da)": 62.34, "MassiveScenarioClassification (nb)": 59.9, "MassiveScenarioClassification (sv)": 65.54, "MassiveScenarioClassification (pl)": 64.25, "MultilingualSentiment (cmn-Hans)": 66.0, "MultilingualSentiment": 64.74, "NoRecClassification (nob-Latn)": 50.08, "NoRecClassification": 53.96, "NordicLangClassification (nob-Latn_nno-Latn_dan-Latn_swe-Latn_isl-Latn_fao-Latn)": 72.15, "NordicLangClassification": 75.15, "NorwegianParliament": 60.15, "OnlineShopping (cmn-Hans)": 88.7, "OnlineShopping": 88.73, "PAC (pol-Latn)": 70.48, "PAC": 70.55, "PolEmo2.0-IN (pol-Latn)": 67.31, "PolEmo2.0-IN": 67.35, "PolEmo2.0-OUT (pol-Latn)": 39.17, "PolEmo2.0-OUT": 39.13, "RuReviewsClassification (rus-Cyrl)": 61.18, "RuSciBenchGRNTIClassification (rus-Cyrl)": 54.99, "RuSciBenchOECDClassification (rus-Cyrl)": 41.72, "ScalaDaClassification": 50.3, "ScalaNbClassification": 50.06, "TNews (cmn-Hans)": 46.6, "TNews": 48.38, "ToxicConversationsClassification": 63.59, "TweetSentimentExtractionClassification": 62.79, "Waimai (cmn-Hans)": 84.15, "Waimai": 83.9 } ] }, "Clustering": { "v_measure": [ { "Model": "multilingual-e5-small", "8TagsClustering": 23.92, "AlloProfClusteringP2P": 60.89, "AlloProfClusteringS2S": 32.52, "BiorxivClusteringP2P": 35.84, "BiorxivClusteringS2S": 27.35, "CLSClusteringP2P": 39.14, "CLSClusteringS2S": 37.79, "GeoreviewClusteringP2P (rus-Cyrl)": 58.57, "HALClusteringS2S": 18.95, "MLSUMClusteringP2P (rus-Cyrl)": 39.69, "MLSUMClusteringP2P": 43.2, "MLSUMClusteringS2S (rus-Cyrl)": 39.9, "MLSUMClusteringS2S": 37.61, "MasakhaNEWSClusteringP2P (amh-Ethi)": 66.2, "MasakhaNEWSClusteringP2P (eng)": 50.08, "MasakhaNEWSClusteringP2P (fra-Latn)": 56.32, "MasakhaNEWSClusteringP2P (hau-Latn)": 53.63, "MasakhaNEWSClusteringP2P (ibo-Latn)": 49.19, "MasakhaNEWSClusteringP2P (lin-Latn)": 55.06, "MasakhaNEWSClusteringP2P (lug-Latn)": 59.97, "MasakhaNEWSClusteringP2P (orm-Ethi)": 32.72, "MasakhaNEWSClusteringP2P (pcm-Latn)": 62.22, "MasakhaNEWSClusteringP2P (run-Latn)": 57.52, "MasakhaNEWSClusteringP2P (sna-Latn)": 45.11, "MasakhaNEWSClusteringP2P (som-Latn)": 42.39, "MasakhaNEWSClusteringP2P (swa-Latn)": 23.77, "MasakhaNEWSClusteringP2P (tir-Ethi)": 57.68, "MasakhaNEWSClusteringP2P (xho-Latn)": 39.96, "MasakhaNEWSClusteringP2P (yor-Latn)": 26.56, "MasakhaNEWSClusteringP2P (fra)": 40.12, "MasakhaNEWSClusteringS2S (amh-Ethi)": 55.48, 
"MasakhaNEWSClusteringS2S (eng)": 37.79, "MasakhaNEWSClusteringS2S (fra-Latn)": 35.8, "MasakhaNEWSClusteringS2S (hau-Latn)": 20.22, "MasakhaNEWSClusteringS2S (ibo-Latn)": 35.67, "MasakhaNEWSClusteringS2S (lin-Latn)": 41.12, "MasakhaNEWSClusteringS2S (lug-Latn)": 48.63, "MasakhaNEWSClusteringS2S (orm-Ethi)": 29.16, "MasakhaNEWSClusteringS2S (pcm-Latn)": 65.36, "MasakhaNEWSClusteringS2S (run-Latn)": 45.5, "MasakhaNEWSClusteringS2S (sna-Latn)": 47.61, "MasakhaNEWSClusteringS2S (som-Latn)": 28.59, "MasakhaNEWSClusteringS2S (swa-Latn)": 13.91, "MasakhaNEWSClusteringS2S (tir-Ethi)": 50.51, "MasakhaNEWSClusteringS2S (xho-Latn)": 37.26, "MasakhaNEWSClusteringS2S (yor-Latn)": 23.38, "MasakhaNEWSClusteringS2S (fra)": 39.22, "MedrxivClusteringP2P": 30.72, "MedrxivClusteringS2S": 27.0, "RedditClustering": 40.12, "RedditClusteringP2P": 59.49, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 51.1, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 45.29, "StackExchangeClustering": 53.32, "StackExchangeClusteringP2P": 31.87, "ThuNewsClusteringP2P": 55.18, "ThuNewsClusteringS2S": 48.93, "TwentyNewsgroupsClustering": 33.67 } ] }, "PairClassification": { "ap": [ { "Model": "multilingual-e5-small", "CDSC-E (pol-Latn)": 69.69, "CDSC-E": 69.7, "Cmnli": 72.12, "Ocnli": 60.77, "OpusparcusPC (deu-Latn)": 94.9, "OpusparcusPC (en)": 98.42, "OpusparcusPC (fin-Latn)": 88.29, "OpusparcusPC (fra-Latn)": 91.77, "OpusparcusPC (rus-Cyrl)": 84.79, "OpusparcusPC (swe-Latn)": 91.07, "OpusparcusPC (fr)": 92.52, "PPC": 86.72, "PSC (pol-Latn)": 99.23, "PSC": 99.24, "PawsXPairClassification (deu-Latn)": 52.13, "PawsXPairClassification (en)": 53.91, "PawsXPairClassification (spa-Latn)": 51.39, "PawsXPairClassification (fra-Latn)": 52.69, "PawsXPairClassification (jpn-Hira)": 48.24, "PawsXPairClassification (kor-Hang)": 49.95, "PawsXPairClassification (cmn-Hans)": 54.01, "PawsXPairClassification (fr)": 55.68, "SICK-E-PL (pol-Latn)": 66.35, "SICK-E-PL": 66.34, "SprintDuplicateQuestions": 92.18, "TERRa (rus-Cyrl)": 55.14, "TwitterSemEval2015": 70.75, "TwitterURLCorpus": 85.03 } ] }, "Reranking": { "map": [ { "Model": "multilingual-e5-small", "AlloprofReranking (fra-Latn)": 64.41, "AlloprofReranking": 56.17, "AskUbuntuDupQuestions": 56.42, "CMedQAv1": 63.44, "CMedQAv2": 62.41, "MMarcoReranking (cmn-Hans)": 29.98, "MMarcoReranking": 24.33, "MindSmallReranking": 29.96, "RuBQReranking (rus-Cyrl)": 71.46, "SciDocsRR": 78.26, "StackOverflowDupQuestions": 46.97, "SyntecReranking (fra-Latn)": 81.22, "SyntecReranking": 86.7, "T2Reranking (cmn-Hans)": 65.72, "T2Reranking": 65.24 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "multilingual-e5-small", "AILACasedocs": 23.43, "AILAStatutes": 19.01, "ARCChallenge": 7.14, "AlloprofRetrieval (fra-Latn)": 27.38, "AlloprofRetrieval": 27.01, "AlphaNLI": 13.0, "ArguAna": 39.09, "ArguAna-PL (pol-Latn)": 37.49, "ArguAna-PL": 37.43, "BSARDRetrieval (fra-Latn)": 14.54, "BSARDRetrieval": 0.0, "CmedqaRetrieval (cmn-Hans)": 24.36, "CmedqaRetrieval": 24.38, "CovidRetrieval (cmn-Hans)": 72.82, "CovidRetrieval": 72.82, "DBPedia-PL": 29.27, "DuRetrieval (cmn-Hans)": 81.36, "DuRetrieval": 81.35, "EcomRetrieval (cmn-Hans)": 53.53, "EcomRetrieval": 53.56, "FiQA-PL (pol-Latn)": 22.02, "FiQA-PL": 22.03, "FiQA2018": 33.13, "GerDaLIRSmall (deu-Latn)": 14.81, "HellaSwag": 23.73, "HotpotQA-PL": 60.15, "LEMBNarrativeQARetrieval": 22.6, "LEMBNeedleRetrieval": 30.75, "LEMBPasskeyRetrieval": 38.25, "LEMBQMSumRetrieval": 21.51, "LEMBSummScreenFDRetrieval": 62.75, "LEMBWikimQARetrieval": 57.13, "LeCaRDv2 (zho-Hans)": 61.58, 
"LegalBenchConsumerContractsQA": 66.98, "LegalBenchCorporateLobbying": 89.47, "LegalQuAD (deu-Latn)": 47.8, "LegalSummarization": 55.76, "MMarcoRetrieval (cmn-Hans)": 73.17, "MMarcoRetrieval": 73.17, "MSMARCO-PL": 26.94, "MedicalRetrieval (cmn-Hans)": 44.84, "MedicalRetrieval": 44.84, "MintakaRetrieval (ara-Arab)": 21.22, "MintakaRetrieval (deu-Latn)": 25.6, "MintakaRetrieval (spa-Latn)": 26.4, "MintakaRetrieval (fra-Latn)": 25.0, "MintakaRetrieval (hin-Deva)": 21.1, "MintakaRetrieval (ita-Latn)": 26.25, "MintakaRetrieval (jpn-Hira)": 20.69, "MintakaRetrieval (por-Latn)": 24.44, "MintakaRetrieval (fr)": 22.53, "NFCorpus": 31.0, "NFCorpus-PL (pol-Latn)": 26.5, "NFCorpus-PL": 26.48, "NQ-PL": 40.46, "PIQA": 21.08, "Quail": 2.38, "Quora-PL": 78.7, "RARbCode": 46.96, "RARbMath": 63.91, "RiaNewsRetrieval (rus-Cyrl)": 70.01, "RuBQRetrieval (rus-Cyrl)": 68.53, "SCIDOCS": 13.9, "SCIDOCS-PL (pol-Latn)": 11.59, "SCIDOCS-PL": 11.6, "SIQA": 2.57, "SciFact": 67.7, "SciFact-PL (pol-Latn)": 62.76, "SciFact-PL": 62.76, "SpartQA": 5.43, "SyntecRetrieval (fra-Latn)": 73.46, "SyntecRetrieval": 75.76, "T2Retrieval (cmn-Hans)": 71.36, "T2Retrieval": 71.39, "TRECCOVID": 72.57, "TRECCOVID-PL (pol-Latn)": 70.92, "TRECCOVID-PL": 70.92, "TempReasonL1": 0.8, "TempReasonL2Fact": 36.76, "TempReasonL2Pure": 0.62, "TempReasonL3Fact": 32.42, "TempReasonL3Pure": 6.36, "Touche2020": 21.16, "VideoRetrieval (cmn-Hans)": 58.06, "VideoRetrieval": 58.09, "WinoGrande": 37.46, "XPQARetrieval (ara-Arab_ara-Arab)": 39.93, "XPQARetrieval (eng-Latn_ara-Arab)": 18.09, "XPQARetrieval (ara-Arab_eng-Latn)": 31.64, "XPQARetrieval (deu-Latn_deu-Latn)": 69.43, "XPQARetrieval (eng-Latn_deu-Latn)": 25.14, "XPQARetrieval (deu-Latn_eng-Latn)": 52.36, "XPQARetrieval (spa-Latn_spa-Latn)": 55.71, "XPQARetrieval (eng-Latn_spa-Latn)": 22.5, "XPQARetrieval (spa-Latn_eng-Latn)": 42.4, "XPQARetrieval (fra-Latn_fra-Latn)": 57.17, "XPQARetrieval (eng-Latn_fra-Latn)": 27.69, "XPQARetrieval (fra-Latn_eng-Latn)": 47.46, "XPQARetrieval (hin-Deva_hin-Deva)": 68.15, "XPQARetrieval (eng-Latn_hin-Deva)": 25.82, "XPQARetrieval (hin-Deva_eng-Latn)": 63.79, "XPQARetrieval (ita-Latn_ita-Latn)": 67.71, "XPQARetrieval (eng-Latn_ita-Latn)": 22.97, "XPQARetrieval (ita-Latn_eng-Latn)": 46.61, "XPQARetrieval (jpn-Hira_jpn-Hira)": 69.49, "XPQARetrieval (eng-Latn_jpn-Hira)": 25.08, "XPQARetrieval (jpn-Hira_eng-Latn)": 54.6, "XPQARetrieval (kor-Hang_kor-Hang)": 33.0, "XPQARetrieval (eng-Latn_kor-Hang)": 22.49, "XPQARetrieval (kor-Hang_eng-Latn)": 23.02, "XPQARetrieval (pol-Latn_pol-Latn)": 43.37, "XPQARetrieval (eng-Latn_pol-Latn)": 19.89, "XPQARetrieval (pol-Latn_eng-Latn)": 28.72, "XPQARetrieval (por-Latn_por-Latn)": 41.8, "XPQARetrieval (eng-Latn_por-Latn)": 15.79, "XPQARetrieval (por-Latn_eng-Latn)": 33.74, "XPQARetrieval (tam-Taml_tam-Taml)": 31.65, "XPQARetrieval (eng-Latn_tam-Taml)": 13.18, "XPQARetrieval (tam-Taml_eng-Latn)": 26.44, "XPQARetrieval (cmn-Hans_cmn-Hans)": 63.98, "XPQARetrieval (eng-Latn_cmn-Hans)": 16.52, "XPQARetrieval (cmn-Hans_eng-Latn)": 45.32, "XPQARetrieval (fr)": 57.47 } ] }, "STS": { "spearman": [ { "Model": "multilingual-e5-small", "AFQMC (cmn-Hans)": 25.21, "AFQMC": 25.21, "ATEC (cmn-Hans)": 35.14, "ATEC": 35.14, "BIOSSES": 82.46, "BQ (cmn-Hans)": 43.27, "BQ": 43.27, "CDSC-R (pol-Latn)": 90.27, "CDSC-R": 90.27, "LCQMC (cmn-Hans)": 72.7, "LCQMC": 72.7, "PAWSX (cmn-Hans)": 11.0, "PAWSX": 11.01, "QBQTC": 30.25, "RUParaPhraserSTS (rus-Cyrl)": 70.46, "RuSTSBenchmarkSTS (rus-Cyrl)": 78.08, "SICK-R": 77.51, "SICK-R-PL (pol-Latn)": 69.45, "SICK-R-PL": 
69.46, "SICKFr (fra-Latn)": 74.67, "SICKFr": 75.62, "STS12": 76.56, "STS13": 76.97, "STS14": 75.52, "STS15": 87.12, "STS16": 83.63, "STS17 (ita-Latn_eng-Latn)": 77.31, "STS17 (en-en)": 86.42, "STS17 (eng-Latn_ara-Arab)": 57.39, "STS17 (eng-Latn_tur-Latn)": 55.93, "STS17 (spa-Latn_eng-Latn)": 72.43, "STS17 (kor-Hang)": 78.87, "STS17 (spa-Latn)": 84.83, "STS17 (eng-Latn_deu-Latn)": 76.82, "STS17 (fra-Latn_eng-Latn)": 72.28, "STS17 (nld-Latn_eng-Latn)": 75.43, "STS17 (ara-Arab)": 73.0, "STS22 (ita-Latn)": 76.53, "STS22 (en)": 61.25, "STS22 (pol-Latn_eng-Latn)": 72.69, "STS22 (cmn-Hans)": 66.85, "STS22 (fra-Latn)": 76.58, "STS22 (deu-Latn)": 53.45, "STS22 (fra-Latn_pol-Latn)": 84.52, "STS22 (deu-Latn_pol-Latn)": 28.24, "STS22 (spa-Latn_eng-Latn)": 74.2, "STS22 (spa-Latn)": 66.86, "STS22 (rus-Cyrl)": 59.9, "STS22 (spa-Latn_ita-Latn)": 71.74, "STS22 (pol-Latn)": 35.78, "STS22 (tur-Latn)": 63.69, "STS22 (ara-Arab)": 56.65, "STS22 (cmn-Hans_eng-Latn)": 65.32, "STS22 (deu-Latn_eng-Latn)": 56.07, "STS22 (deu-Latn_fra-Latn)": 60.62, "STS22 (pl)": 35.8, "STSB (cmn-Hans)": 77.73, "STSB": 77.73, "STSBenchmark": 84.11, "STSBenchmarkMultilingualSTS (en)": 84.11, "STSBenchmarkMultilingualSTS (cmn-Hans)": 78.49, "STSBenchmarkMultilingualSTS (rus-Cyrl)": 78.24, "STSBenchmarkMultilingualSTS (spa-Latn)": 80.31, "STSBenchmarkMultilingualSTS (deu-Latn)": 79.17, "STSBenchmarkMultilingualSTS (fra-Latn)": 79.2, "STSBenchmarkMultilingualSTS (nld-Latn)": 76.04, "STSBenchmarkMultilingualSTS (pol-Latn)": 72.61, "STSBenchmarkMultilingualSTS (por-Latn)": 77.39, "STSBenchmarkMultilingualSTS (ita-Latn)": 78.21, "STSBenchmarkMultilingualSTS (fr)": 79.32 } ] }, "Summarization": { "spearman": [ { "Model": "multilingual-e5-small", "SummEval": 30.04, "SummEvalFr (fra-Latn)": 31.14, "SummEvalFr": 31.85 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "multilingual-e5-small" } ] } }, "bert-base-multilingual-cased": { "BitextMining": { "f1": [ { "Model": "bert-base-multilingual-cased" } ] }, "Classification": { "accuracy": [ { "Model": "bert-base-multilingual-cased", "AmazonReviewsClassification (fr)": 29.39, "MTOPDomainClassification (fr)": 63.61, "MTOPIntentClassification (fr)": 37.84, "MasakhaNEWSClassification (fra)": 64.0, "MassiveIntentClassification (fr)": 37.3, "MassiveScenarioClassification (fr)": 44.47 } ] }, "Clustering": { "v_measure": [ { "Model": "bert-base-multilingual-cased", "AlloProfClusteringP2P": 51.5, "AlloProfClusteringS2S": 43.06, "HALClusteringS2S": 20.81, "MLSUMClusteringP2P": 40.9, "MLSUMClusteringS2S": 31.8, "MasakhaNEWSClusteringP2P (fra)": 24.23, "MasakhaNEWSClusteringS2S (fra)": 24.46 } ] }, "PairClassification": { "ap": [ { "Model": "bert-base-multilingual-cased", "OpusparcusPC (fr)": 86.77, "PawsXPairClassification (fr)": 53.39 } ] }, "Reranking": { "map": [ { "Model": "bert-base-multilingual-cased", "AlloprofReranking": 36.23, "SyntecReranking": 53.25 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "bert-base-multilingual-cased", "AlloprofRetrieval": 1.63, "BSARDRetrieval": 0.0, "MintakaRetrieval (fr)": 3.55, "SyntecRetrieval": 18.95, "XPQARetrieval (fr)": 18.49 } ] }, "STS": { "spearman": [ { "Model": "bert-base-multilingual-cased", "SICKFr": 58.75, "STS22 (fr)": 39.05, "STSBenchmarkMultilingualSTS (fr)": 52.25 } ] }, "Summarization": { "spearman": [ { "Model": "bert-base-multilingual-cased", "SummEvalFr": 28.81 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "bert-base-multilingual-cased" } ] } }, "instructor-large": { "BitextMining": { "f1": [ { "Model": "instructor-large" } ] }, 
"Classification": { "accuracy": [ { "Model": "instructor-large" } ] }, "Clustering": { "v_measure": [ { "Model": "instructor-large" } ] }, "PairClassification": { "ap": [ { "Model": "instructor-large" } ] }, "Reranking": { "map": [ { "Model": "instructor-large" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "instructor-large", "BrightRetrieval (pony)": 1.32, "BrightRetrieval (sustainable_living)": 13.16, "BrightRetrieval (aops)": 7.94, "BrightRetrieval (biology)": 15.61, "BrightRetrieval (stackoverflow)": 11.21, "BrightRetrieval (theoremqa_theorems)": 9.29, "BrightRetrieval (psychology)": 21.94, "BrightRetrieval (economics)": 15.99, "BrightRetrieval (robotics)": 11.45, "BrightRetrieval (leetcode)": 20.0, "BrightRetrieval (earth_science)": 21.52, "BrightRetrieval (theoremqa_questions)": 20.07 } ] }, "STS": { "spearman": [ { "Model": "instructor-large" } ] }, "Summarization": { "spearman": [ { "Model": "instructor-large" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "instructor-large" } ] } }, "nomic-embed-text-v1": { "BitextMining": { "f1": [ { "Model": "nomic-embed-text-v1" } ] }, "Classification": { "accuracy": [ { "Model": "nomic-embed-text-v1" } ] }, "Clustering": { "v_measure": [ { "Model": "nomic-embed-text-v1" } ] }, "PairClassification": { "ap": [ { "Model": "nomic-embed-text-v1" } ] }, "Reranking": { "map": [ { "Model": "nomic-embed-text-v1" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "nomic-embed-text-v1", "LEMBNarrativeQARetrieval": 41.23, "LEMBNeedleRetrieval": 39.5, "LEMBPasskeyRetrieval": 44.75, "LEMBQMSumRetrieval": 36.65, "LEMBSummScreenFDRetrieval": 92.97, "LEMBWikimQARetrieval": 73.75 } ] }, "STS": { "spearman": [ { "Model": "nomic-embed-text-v1" } ] }, "Summarization": { "spearman": [ { "Model": "nomic-embed-text-v1" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "nomic-embed-text-v1" } ] } }, "sup-simcse-bert-base-uncased": { "BitextMining": { "f1": [ { "Model": "sup-simcse-bert-base-uncased" } ] }, "Classification": { "accuracy": [ { "Model": "sup-simcse-bert-base-uncased", "AmazonCounterfactualClassification (en)": 75.75, "AmazonPolarityClassification": 82.47, "AmazonReviewsClassification (en)": 39.6, "Banking77Classification": 75.76, "EmotionClassification": 44.81, "ImdbClassification": 73.53, "MTOPDomainClassification (en)": 84.29, "MTOPIntentClassification (en)": 63.14, "MassiveIntentClassification (en)": 65.95, "MassiveScenarioClassification (en)": 70.78, "ToxicConversationsClassification": 72.04, "TweetSentimentExtractionClassification": 59.73 } ] }, "Clustering": { "v_measure": [ { "Model": "sup-simcse-bert-base-uncased", "ArxivClusteringP2P": 35.18, "ArxivClusteringS2S": 27.54, "BiorxivClusteringP2P": 30.15, "BiorxivClusteringS2S": 24.67, "MedrxivClusteringP2P": 26.25, "MedrxivClusteringS2S": 24.12, "RedditClustering": 40.23, "RedditClusteringP2P": 47.74, "StackExchangeClustering": 47.55, "StackExchangeClusteringP2P": 29.45, "TwentyNewsgroupsClustering": 34.86 } ] }, "PairClassification": { "ap": [ { "Model": "sup-simcse-bert-base-uncased", "SprintDuplicateQuestions": 69.39, "TwitterSemEval2015": 67.75, "TwitterURLCorpus": 83.89 } ] }, "Reranking": { "map": [ { "Model": "sup-simcse-bert-base-uncased", "AskUbuntuDupQuestions": 51.8, "MindSmallReranking": 29.3, "SciDocsRR": 70.14, "StackOverflowDupQuestions": 38.9 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "sup-simcse-bert-base-uncased", "ArguAna": 38.33, "CQADupstackRetrieval": 14.5, "ClimateFEVER": 11.98, "DBPedia": 19.73, "FEVER": 20.41, "FiQA2018": 10.41, "HotpotQA": 22.9, 
"MSMARCO": 11.0, "NFCorpus": 12.42, "NQ": 16.08, "QuoraRetrieval": 79.62, "SCIDOCS": 7.53, "SciFact": 29.59, "TRECCOVID": 22.93, "Touche2020": 9.9 } ] }, "STS": { "spearman": [ { "Model": "sup-simcse-bert-base-uncased", "BIOSSES": 68.38, "SICK-R": 80.77, "STS12": 75.3, "STS13": 84.67, "STS14": 80.19, "STS15": 85.4, "STS16": 80.82, "STS17 (en-en)": 89.44, "STS22 (en)": 61.96, "STSBenchmark": 84.25 } ] }, "Summarization": { "spearman": [ { "Model": "sup-simcse-bert-base-uncased", "SummEval": 31.17 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "sup-simcse-bert-base-uncased" } ] } }, "text-similarity-babbage-001": { "BitextMining": { "f1": [ { "Model": "text-similarity-babbage-001" } ] }, "Classification": { "accuracy": [ { "Model": "text-similarity-babbage-001" } ] }, "Clustering": { "v_measure": [ { "Model": "text-similarity-babbage-001", "RedditClustering": 45.64, "StackExchangeClustering": 53.01, "TwentyNewsgroupsClustering": 42.01 } ] }, "PairClassification": { "ap": [ { "Model": "text-similarity-babbage-001", "SprintDuplicateQuestions": 76.46, "TwitterSemEval2015": 70.85, "TwitterURLCorpus": 85.08 } ] }, "Reranking": { "map": [ { "Model": "text-similarity-babbage-001", "AskUbuntuDupQuestions": 54.68, "SciDocsRR": 72.78, "StackOverflowDupQuestions": 40.65 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "text-similarity-babbage-001" } ] }, "STS": { "spearman": [ { "Model": "text-similarity-babbage-001", "BIOSSES": 78.12, "SICK-R": 77.02, "STSBenchmark": 84.32 } ] }, "Summarization": { "spearman": [ { "Model": "text-similarity-babbage-001" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "text-similarity-babbage-001" } ] } }, "LLM2Vec-Meta-Llama-3-unsupervised": { "BitextMining": { "f1": [ { "Model": "LLM2Vec-Meta-Llama-3-unsupervised" } ] }, "Classification": { "accuracy": [ { "Model": "LLM2Vec-Meta-Llama-3-unsupervised", "AmazonCounterfactualClassification (en)": 75.7, "AmazonPolarityClassification": 80.68, "AmazonReviewsClassification (en)": 40.0, "Banking77Classification": 84.77, "EmotionClassification": 47.08, "ImdbClassification": 75.19, "MTOPDomainClassification (en)": 94.47, "MTOPIntentClassification (en)": 81.09, "MassiveIntentClassification (en)": 75.01, "MassiveScenarioClassification (en)": 79.16, "ToxicConversationsClassification": 71.85, "TweetSentimentExtractionClassification": 57.61 } ] }, "Clustering": { "v_measure": [ { "Model": "LLM2Vec-Meta-Llama-3-unsupervised", "ArxivClusteringP2P": 49.22, "ArxivClusteringS2S": 41.71, "BiorxivClusteringP2P": 38.39, "BiorxivClusteringS2S": 31.31, "MedrxivClusteringP2P": 31.47, "MedrxivClusteringS2S": 27.87, "RedditClustering": 43.67, "RedditClusteringP2P": 61.67, "StackExchangeClustering": 68.2, "StackExchangeClusteringP2P": 36.36, "TwentyNewsgroupsClustering": 32.01 } ] }, "PairClassification": { "ap": [ { "Model": "LLM2Vec-Meta-Llama-3-unsupervised", "SprintDuplicateQuestions": 88.14, "TwitterSemEval2015": 66.6, "TwitterURLCorpus": 79.3 } ] }, "Reranking": { "map": [ { "Model": "LLM2Vec-Meta-Llama-3-unsupervised", "AskUbuntuDupQuestions": 57.16, "MindSmallReranking": 30.1, "SciDocsRR": 76.28, "StackOverflowDupQuestions": 48.82 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "LLM2Vec-Meta-Llama-3-unsupervised", "ArguAna": 51.73, "CQADupstackRetrieval": 32.4, "ClimateFEVER": 23.58, "DBPedia": 26.78, "FEVER": 53.42, "FiQA2018": 28.56, "HotpotQA": 52.37, "MSMARCO": 17.47, "NFCorpus": 26.28, "NQ": 37.65, "QuoraRetrieval": 84.64, "SCIDOCS": 10.39, "SciFact": 66.36, "TRECCOVID": 63.34, "Touche2020": 12.82 } ] }, "STS": { 
"spearman": [ { "Model": "LLM2Vec-Meta-Llama-3-unsupervised", "BIOSSES": 84.67, "SICK-R": 72.16, "STS12": 61.6, "STS13": 79.71, "STS14": 72.11, "STS15": 82.18, "STS16": 79.41, "STS17 (en-en)": 85.44, "STS22 (en)": 63.9, "STSBenchmark": 77.44 } ] }, "Summarization": { "spearman": [ { "Model": "LLM2Vec-Meta-Llama-3-unsupervised", "SummEval": 31.45 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "LLM2Vec-Meta-Llama-3-unsupervised" } ] } }, "bge-m3-instruct": { "BitextMining": { "f1": [ { "Model": "bge-m3-instruct" } ] }, "Classification": { "accuracy": [ { "Model": "bge-m3-instruct" } ] }, "Clustering": { "v_measure": [ { "Model": "bge-m3-instruct" } ] }, "PairClassification": { "ap": [ { "Model": "bge-m3-instruct" } ] }, "Reranking": { "map": [ { "Model": "bge-m3-instruct" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "bge-m3-instruct", "ARCChallenge": 9.03, "AlphaNLI": 24.69, "HellaSwag": 25.55, "PIQA": 19.03, "Quail": 7.08, "RARbCode": 39.58, "RARbMath": 64.51, "SIQA": 4.77, "SpartQA": 7.0, "TempReasonL1": 0.8, "TempReasonL2Fact": 34.99, "TempReasonL2Pure": 0.62, "TempReasonL3Fact": 32.47, "TempReasonL3Pure": 7.01, "WinoGrande": 35.33 } ] }, "STS": { "spearman": [ { "Model": "bge-m3-instruct" } ] }, "Summarization": { "spearman": [ { "Model": "bge-m3-instruct" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "bge-m3-instruct" } ] } }, "multi-qa-MiniLM-L6-cos-v1": { "BitextMining": { "f1": [ { "Model": "multi-qa-MiniLM-L6-cos-v1" } ] }, "Classification": { "accuracy": [ { "Model": "multi-qa-MiniLM-L6-cos-v1", "AmazonReviewsClassification (fr)": 27.05, "MTOPDomainClassification (fr)": 72.97, "MTOPIntentClassification (fr)": 37.18, "MasakhaNEWSClassification (fra)": 75.62, "MassiveIntentClassification (fr)": 42.64, "MassiveScenarioClassification (fr)": 49.92 } ] }, "Clustering": { "v_measure": [ { "Model": "multi-qa-MiniLM-L6-cos-v1", "AlloProfClusteringP2P": 49.13, "AlloProfClusteringS2S": 26.16, "HALClusteringS2S": 12.49, "MLSUMClusteringP2P": 35.15, "MLSUMClusteringS2S": 25.95, "MasakhaNEWSClusteringP2P (fra)": 53.73, "MasakhaNEWSClusteringS2S (fra)": 27.27 } ] }, "PairClassification": { "ap": [ { "Model": "multi-qa-MiniLM-L6-cos-v1", "OpusparcusPC (fr)": 88.07, "PawsXPairClassification (fr)": 57.36 } ] }, "Reranking": { "map": [ { "Model": "multi-qa-MiniLM-L6-cos-v1", "AlloprofReranking": 40.28, "SyntecReranking": 65.08 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "multi-qa-MiniLM-L6-cos-v1", "AlloprofRetrieval": 30.23, "BSARDRetrieval": 0.0, "MintakaRetrieval (fr)": 16.31, "SyntecRetrieval": 58.07, "XPQARetrieval (fr)": 48.83 } ] }, "STS": { "spearman": [ { "Model": "multi-qa-MiniLM-L6-cos-v1", "SICKFr": 62.11, "STS22 (fr)": 74.62, "STSBenchmarkMultilingualSTS (fr)": 63.85 } ] }, "Summarization": { "spearman": [ { "Model": "multi-qa-MiniLM-L6-cos-v1", "SummEvalFr": 27.59 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "multi-qa-MiniLM-L6-cos-v1" } ] } }, "titan-embed-text-v1": { "BitextMining": { "f1": [ { "Model": "titan-embed-text-v1" } ] }, "Classification": { "accuracy": [ { "Model": "titan-embed-text-v1", "AmazonCounterfactualClassification (en)": 61.85, "Banking77Classification": 83.21 } ] }, "Clustering": { "v_measure": [ { "Model": "titan-embed-text-v1" } ] }, "PairClassification": { "ap": [ { "Model": "titan-embed-text-v1" } ] }, "Reranking": { "map": [ { "Model": "titan-embed-text-v1", "SciDocsRR": 88.87 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "titan-embed-text-v1", "ArguAna": 48.83, "FiQA2018": 40.38, "MSMARCO": 35.19, "NQ": 51.08, 
"SciFact": 73.5, "TRECCOVID": 54.74 } ] }, "STS": { "spearman": [ { "Model": "titan-embed-text-v1", "BIOSSES": 84.17, "SICK-R": 73.05, "STS12": 66.59, "STS13": 83.24, "STS14": 73.71, "STS15": 82.4, "STS16": NaN, "STS17 (en-en)": 80.9, "STSBenchmark": 74.85 } ] }, "Summarization": { "spearman": [ { "Model": "titan-embed-text-v1" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "titan-embed-text-v1" } ] } }, "udever-bloom-560m": { "BitextMining": { "f1": [ { "Model": "udever-bloom-560m" } ] }, "Classification": { "accuracy": [ { "Model": "udever-bloom-560m", "AmazonReviewsClassification (fr)": 26.85, "MTOPDomainClassification (fr)": 34.99, "MTOPIntentClassification (fr)": 15.76, "MasakhaNEWSClassification (fra)": 67.94, "MassiveIntentClassification (fr)": 15.09, "MassiveScenarioClassification (fr)": 21.67 } ] }, "Clustering": { "v_measure": [ { "Model": "udever-bloom-560m", "AlloProfClusteringP2P": 53.57, "AlloProfClusteringS2S": 22.13, "HALClusteringS2S": 7.68, "MLSUMClusteringP2P": 36.43, "MLSUMClusteringS2S": 25.26, "MasakhaNEWSClusteringP2P (fra)": 37.57, "MasakhaNEWSClusteringS2S (fra)": 20.58 } ] }, "PairClassification": { "ap": [ { "Model": "udever-bloom-560m", "OpusparcusPC (fr)": 82.1, "PawsXPairClassification (fr)": 59.69 } ] }, "Reranking": { "map": [ { "Model": "udever-bloom-560m", "AlloprofReranking": 28.75, "SyntecReranking": 50.88 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "udever-bloom-560m", "AlloprofRetrieval": 1.98, "BSARDRetrieval": 0.0, "MintakaRetrieval (fr)": 0.48, "SyntecRetrieval": 24.45, "XPQARetrieval (fr)": 12.98 } ] }, "STS": { "spearman": [ { "Model": "udever-bloom-560m", "SICKFr": 54.54, "STS22 (fr)": 61.35, "STSBenchmarkMultilingualSTS (fr)": 36.78 } ] }, "Summarization": { "spearman": [ { "Model": "udever-bloom-560m", "SummEvalFr": 23.63 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "udever-bloom-560m" } ] } }, "LLM2Vec-Llama-2-supervised": { "BitextMining": { "f1": [ { "Model": "LLM2Vec-Llama-2-supervised" } ] }, "Classification": { "accuracy": [ { "Model": "LLM2Vec-Llama-2-supervised", "AmazonCounterfactualClassification (en)": 82.22, "AmazonPolarityClassification": 89.69, "AmazonReviewsClassification (en)": 48.47, "Banking77Classification": 88.17, "EmotionClassification": 51.71, "ImdbClassification": 85.78, "MTOPDomainClassification (en)": 95.57, "MTOPIntentClassification (en)": 82.81, "MassiveIntentClassification (en)": 78.06, "MassiveScenarioClassification (en)": 81.35, "ToxicConversationsClassification": 71.01, "TweetSentimentExtractionClassification": 61.11 } ] }, "Clustering": { "v_measure": [ { "Model": "LLM2Vec-Llama-2-supervised", "ArxivClusteringP2P": 43.14, "ArxivClusteringS2S": 42.38, "BiorxivClusteringP2P": 35.88, "BiorxivClusteringS2S": 34.81, "MedrxivClusteringP2P": 32.23, "MedrxivClusteringS2S": 31.37, "RedditClustering": 61.1, "RedditClusteringP2P": 64.52, "StackExchangeClustering": 67.98, "StackExchangeClusteringP2P": 33.2, "TwentyNewsgroupsClustering": 51.04 } ] }, "PairClassification": { "ap": [ { "Model": "LLM2Vec-Llama-2-supervised", "SprintDuplicateQuestions": 96.83, "TwitterSemEval2015": 80.7, "TwitterURLCorpus": 86.56 } ] }, "Reranking": { "map": [ { "Model": "LLM2Vec-Llama-2-supervised", "AskUbuntuDupQuestions": 63.13, "MindSmallReranking": 31.34, "SciDocsRR": 84.03, "StackOverflowDupQuestions": 51.02 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "LLM2Vec-Llama-2-supervised", "ArguAna": 56.53, "CQADupstackRetrieval": 45.94, "ClimateFEVER": 30.7, "DBPedia": 48.42, "FEVER": 89.93, "FiQA2018": 51.28, 
"HotpotQA": 72.99, "MSMARCO": 41.46, "NFCorpus": 40.33, "NQ": 61.24, "QuoraRetrieval": 85.59, "SCIDOCS": 21.05, "SciFact": 77.3, "TRECCOVID": 79.25, "Touche2020": 16.92 } ] }, "STS": { "spearman": [ { "Model": "LLM2Vec-Llama-2-supervised", "BIOSSES": 82.13, "SICK-R": 83.01, "STS12": 78.85, "STS13": 86.84, "STS14": 84.04, "STS15": 88.72, "STS16": 86.79, "STS17 (en-en)": 90.63, "STS22 (en)": 67.55, "STSBenchmark": 88.72 } ] }, "Summarization": { "spearman": [ { "Model": "LLM2Vec-Llama-2-supervised", "SummEval": 28.49 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "LLM2Vec-Llama-2-supervised" } ] } }, "bert-base-25lang-cased": { "BitextMining": { "f1": [ { "Model": "bert-base-25lang-cased" } ] }, "Classification": { "accuracy": [ { "Model": "bert-base-25lang-cased", "AmazonReviewsClassification (fr)": 29.39, "MTOPDomainClassification (fr)": 63.63, "MTOPIntentClassification (fr)": 37.86, "MasakhaNEWSClassification (fra)": 63.91, "MassiveIntentClassification (fr)": 37.3, "MassiveScenarioClassification (fr)": 44.47 } ] }, "Clustering": { "v_measure": [ { "Model": "bert-base-25lang-cased", "AlloProfClusteringP2P": 53.49, "AlloProfClusteringS2S": 43.1, "HALClusteringS2S": 19.78, "MLSUMClusteringP2P": 40.73, "MLSUMClusteringS2S": 31.94, "MasakhaNEWSClusteringP2P (fra)": 24.23, "MasakhaNEWSClusteringS2S (fra)": 24.46 } ] }, "PairClassification": { "ap": [ { "Model": "bert-base-25lang-cased", "OpusparcusPC (fr)": 86.79, "PawsXPairClassification (fr)": 53.39 } ] }, "Reranking": { "map": [ { "Model": "bert-base-25lang-cased", "AlloprofReranking": 36.25, "SyntecReranking": 53.25 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "bert-base-25lang-cased", "AlloprofRetrieval": 1.6, "BSARDRetrieval": 0.0, "MintakaRetrieval (fr)": 3.55, "SyntecRetrieval": 18.95, "XPQARetrieval (fr)": 18.46 } ] }, "STS": { "spearman": [ { "Model": "bert-base-25lang-cased", "SICKFr": 58.76, "STS22 (fr)": 38.77, "STSBenchmarkMultilingualSTS (fr)": 52.25 } ] }, "Summarization": { "spearman": [ { "Model": "bert-base-25lang-cased", "SummEvalFr": 28.84 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "bert-base-25lang-cased" } ] } }, "flan-t5-base": { "BitextMining": { "f1": [ { "Model": "flan-t5-base" } ] }, "Classification": { "accuracy": [ { "Model": "flan-t5-base" } ] }, "Clustering": { "v_measure": [ { "Model": "flan-t5-base" } ] }, "PairClassification": { "ap": [ { "Model": "flan-t5-base" } ] }, "Reranking": { "map": [ { "Model": "flan-t5-base" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "flan-t5-base" } ] }, "STS": { "spearman": [ { "Model": "flan-t5-base" } ] }, "Summarization": { "spearman": [ { "Model": "flan-t5-base" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "flan-t5-base", "Core17InstructionRetrieval": -3.31, "News21InstructionRetrieval": -0.12, "Robust04InstructionRetrieval": 5.35 } ] } }, "sbert_large_mt_nlu_ru": { "BitextMining": { "f1": [ { "Model": "sbert_large_mt_nlu_ru" } ] }, "Classification": { "accuracy": [ { "Model": "sbert_large_mt_nlu_ru", "GeoreviewClassification (rus-Cyrl)": 39.67, "HeadlineClassification (rus-Cyrl)": 77.19, "InappropriatenessClassification (rus-Cyrl)": 64.64, "KinopoiskClassification (rus-Cyrl)": 50.33, "MassiveIntentClassification (rus-Cyrl)": 61.42, "MassiveScenarioClassification (rus-Cyrl)": 68.13, "RuReviewsClassification (rus-Cyrl)": 58.29, "RuSciBenchGRNTIClassification (rus-Cyrl)": 54.19, "RuSciBenchOECDClassification (rus-Cyrl)": 43.8 } ] }, "Clustering": { "v_measure": [ { "Model": "sbert_large_mt_nlu_ru", "GeoreviewClusteringP2P (rus-Cyrl)": 
58.45, "RuSciBenchGRNTIClusteringP2P (rus-Cyrl)": 52.2, "RuSciBenchOECDClusteringP2P (rus-Cyrl)": 47.29 } ] }, "PairClassification": { "ap": [ { "Model": "sbert_large_mt_nlu_ru", "TERRa (rus-Cyrl)": 51.97 } ] }, "Reranking": { "map": [ { "Model": "sbert_large_mt_nlu_ru", "RuBQReranking (rus-Cyrl)": 56.13 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "sbert_large_mt_nlu_ru", "RiaNewsRetrieval (rus-Cyrl)": 21.4, "RuBQRetrieval (rus-Cyrl)": 29.8 } ] }, "STS": { "spearman": [ { "Model": "sbert_large_mt_nlu_ru", "RUParaPhraserSTS (rus-Cyrl)": 65.17, "RuSTSBenchmarkSTS (rus-Cyrl)": 71.22, "STS22 (rus-Cyrl)": 56.82 } ] }, "Summarization": { "spearman": [ { "Model": "sbert_large_mt_nlu_ru" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "sbert_large_mt_nlu_ru" } ] } }, "monobert-large-msmarco": { "BitextMining": { "f1": [ { "Model": "monobert-large-msmarco" } ] }, "Classification": { "accuracy": [ { "Model": "monobert-large-msmarco" } ] }, "Clustering": { "v_measure": [ { "Model": "monobert-large-msmarco" } ] }, "PairClassification": { "ap": [ { "Model": "monobert-large-msmarco" } ] }, "Reranking": { "map": [ { "Model": "monobert-large-msmarco" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "monobert-large-msmarco" } ] }, "STS": { "spearman": [ { "Model": "monobert-large-msmarco" } ] }, "Summarization": { "spearman": [ { "Model": "monobert-large-msmarco" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "monobert-large-msmarco", "Core17InstructionRetrieval": -0.24, "News21InstructionRetrieval": -0.8, "Robust04InstructionRetrieval": -9.36 } ] } }, "bm25s": { "BitextMining": { "f1": [ { "Model": "bm25s" } ] }, "Classification": { "accuracy": [ { "Model": "bm25s" } ] }, "Clustering": { "v_measure": [ { "Model": "bm25s" } ] }, "PairClassification": { "ap": [ { "Model": "bm25s" } ] }, "Reranking": { "map": [ { "Model": "bm25s" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "bm25s", "ArguAna": 49.28, "CQADupstackRetrieval": 31.86, "ClimateFEVER": 13.62, "DBPedia": 29.91, "FEVER": 48.09, "FiQA2018": 25.14, "HotpotQA": 56.91, "MSMARCO": 21.89, "NFCorpus": 32.08, "NQ": 28.5, "QuoraRetrieval": 80.42, "SCIDOCS": 15.78, "SciFact": 68.7, "TRECCOVID": 62.31, "Touche2020": 33.05 } ] }, "STS": { "spearman": [ { "Model": "bm25s" } ] }, "Summarization": { "spearman": [ { "Model": "bm25s" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "bm25s" } ] } }, "gottbert-base": { "BitextMining": { "f1": [ { "Model": "gottbert-base" } ] }, "Classification": { "accuracy": [ { "Model": "gottbert-base" } ] }, "Clustering": { "v_measure": [ { "Model": "gottbert-base", "BlurbsClusteringP2P": 34.49, "BlurbsClusteringS2S": 8.37, "TenKGnadClusteringP2P": 33.66, "TenKGnadClusteringS2S": 9.34 } ] }, "PairClassification": { "ap": [ { "Model": "gottbert-base" } ] }, "Reranking": { "map": [ { "Model": "gottbert-base" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "gottbert-base" } ] }, "STS": { "spearman": [ { "Model": "gottbert-base" } ] }, "Summarization": { "spearman": [ { "Model": "gottbert-base" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "gottbert-base" } ] } }, "text2vec-base-chinese": { "BitextMining": { "f1": [ { "Model": "text2vec-base-chinese" } ] }, "Classification": { "accuracy": [ { "Model": "text2vec-base-chinese", "AmazonReviewsClassification (zh)": 34.12, "IFlyTek": 42.05, "JDReview": 82.14, "MassiveIntentClassification (zh-CN)": 63.98, "MassiveScenarioClassification (zh-CN)": 70.52, "MultilingualSentiment": 60.98, "OnlineShopping": 85.69, "TNews": 43.01, "Waimai": 77.22 } ] }, 
"Clustering": { "v_measure": [ { "Model": "text2vec-base-chinese", "CLSClusteringP2P": 35.27, "CLSClusteringS2S": 32.42, "ThuNewsClusteringP2P": 42.92, "ThuNewsClusteringS2S": 40.01 } ] }, "PairClassification": { "ap": [ { "Model": "text2vec-base-chinese", "Cmnli": 73.87, "Ocnli": 60.95 } ] }, "Reranking": { "map": [ { "Model": "text2vec-base-chinese", "CMedQAv1": 59.26, "CMedQAv2": 59.82, "MMarcoReranking": 12.76, "T2Reranking": 65.95 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "text2vec-base-chinese", "CmedqaRetrieval": 15.91, "CovidRetrieval": 44.81, "DuRetrieval": 52.23, "EcomRetrieval": 34.6, "MMarcoRetrieval": 44.06, "MedicalRetrieval": 27.56, "T2Retrieval": 51.67, "VideoRetrieval": 39.52 } ] }, "STS": { "spearman": [ { "Model": "text2vec-base-chinese", "AFQMC": 26.06, "ATEC": 31.93, "BQ": 42.67, "LCQMC": 70.16, "PAWSX": 17.21, "QBQTC": 24.62, "STS22 (zh)": 55.35, "STSB": 79.3 } ] }, "Summarization": { "spearman": [ { "Model": "text2vec-base-chinese" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "text2vec-base-chinese" } ] } }, "voyage-lite-02-instruct": { "BitextMining": { "f1": [ { "Model": "voyage-lite-02-instruct" } ] }, "Classification": { "accuracy": [ { "Model": "voyage-lite-02-instruct", "AmazonCounterfactualClassification (en)": 88.31, "AmazonPolarityClassification": 96.32, "AmazonReviewsClassification (en)": 56.25, "Banking77Classification": 88.59, "EmotionClassification": 50.28, "ImdbClassification": 95.75, "MTOPDomainClassification (en)": 97.65, "MTOPIntentClassification (en)": 75.16, "MassiveIntentClassification (en)": 73.97, "MassiveScenarioClassification (en)": 83.99, "ToxicConversationsClassification": 81.75, "TweetSentimentExtractionClassification": 62.98 } ] }, "Clustering": { "v_measure": [ { "Model": "voyage-lite-02-instruct", "ArxivClusteringP2P": 51.95, "ArxivClusteringS2S": 42.48, "BiorxivClusteringP2P": 50.15, "BiorxivClusteringS2S": 42.84, "MedrxivClusteringP2P": 47.24, "MedrxivClusteringS2S": 43.48, "RedditClustering": 63.73, "RedditClusteringP2P": 64.09, "StackExchangeClustering": 70.71, "StackExchangeClusteringP2P": 40.34, "TwentyNewsgroupsClustering": 59.56 } ] }, "PairClassification": { "ap": [ { "Model": "voyage-lite-02-instruct", "SprintDuplicateQuestions": 98.07, "TwitterSemEval2015": 74.44, "TwitterURLCorpus": 88.11 } ] }, "Reranking": { "map": [ { "Model": "voyage-lite-02-instruct", "AskUbuntuDupQuestions": 63.24, "MindSmallReranking": 31.48, "SciDocsRR": 84.68, "StackOverflowDupQuestions": 53.56 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "voyage-lite-02-instruct", "ArguAna": 70.28, "CQADupstackRetrieval": 46.2, "ClimateFEVER": 31.95, "DBPedia": 39.79, "FEVER": 91.35, "FiQA2018": 52.51, "HotpotQA": 75.51, "MSMARCO": 37.93, "NFCorpus": 43.7, "NQ": 64.26, "QuoraRetrieval": 87.62, "SCIDOCS": 20.24, "SciFact": 79.91, "TRECCOVID": 81.02, "Touche2020": 26.8 } ] }, "STS": { "spearman": [ { "Model": "voyage-lite-02-instruct", "BIOSSES": 89.7, "SICK-R": 78.44, "STS12": 86.46, "STS13": 87.76, "STS14": 86.6, "STS15": 90.1, "STS16": 86.39, "STS17 (en-en)": 86.98, "STS22 (en)": 76.89, "STSBenchmark": 88.56 } ] }, "Summarization": { "spearman": [ { "Model": "voyage-lite-02-instruct", "SummEval": 31.01 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "voyage-lite-02-instruct" } ] } }, "voyage-lite-01-instruct": { "BitextMining": { "f1": [ { "Model": "voyage-lite-01-instruct" } ] }, "Classification": { "accuracy": [ { "Model": "voyage-lite-01-instruct", "AmazonCounterfactualClassification (en)": 71.43, "AmazonPolarityClassification": 
96.41, "AmazonReviewsClassification (en)": 57.06, "Banking77Classification": 81.64, "EmotionClassification": 48.29, "ImdbClassification": 95.49, "MTOPDomainClassification (en)": 96.3, "MTOPIntentClassification (en)": 67.93, "MassiveIntentClassification (en)": 71.29, "MassiveScenarioClassification (en)": 76.74, "ToxicConversationsClassification": 75.45, "TweetSentimentExtractionClassification": 59.44 } ] }, "Clustering": { "v_measure": [ { "Model": "voyage-lite-01-instruct", "ArxivClusteringP2P": 47.92, "ArxivClusteringS2S": 42.42, "BiorxivClusteringP2P": 38.72, "BiorxivClusteringS2S": 36.6, "MedrxivClusteringP2P": 34.04, "MedrxivClusteringS2S": 32.81, "RedditClustering": 61.56, "RedditClusteringP2P": 65.35, "StackExchangeClustering": 70.16, "StackExchangeClusteringP2P": 38.23, "TwentyNewsgroupsClustering": 53.56 } ] }, "PairClassification": { "ap": [ { "Model": "voyage-lite-01-instruct", "SprintDuplicateQuestions": 96.01, "TwitterSemEval2015": 76.87, "TwitterURLCorpus": 86.84 } ] }, "Reranking": { "map": [ { "Model": "voyage-lite-01-instruct", "AskUbuntuDupQuestions": 65.77, "MindSmallReranking": 31.69, "SciDocsRR": 87.03, "StackOverflowDupQuestions": 54.49 } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "voyage-lite-01-instruct", "ArguAna": 58.73, "CQADupstackRetrieval": 45.11, "ClimateFEVER": 37.47, "DBPedia": 43.42, "FEVER": 89.71, "FiQA2018": 44.79, "HotpotQA": 70.46, "MSMARCO": 39.66, "NFCorpus": 43.33, "NQ": 60.65, "QuoraRetrieval": 87.83, "SCIDOCS": 23.19, "SciFact": 73.64, "TRECCOVID": 78.92, "Touche2020": 36.83 } ] }, "STS": { "spearman": [ { "Model": "voyage-lite-01-instruct", "BIOSSES": 84.85, "SICK-R": 79.71, "STS12": 77.09, "STS13": 88.91, "STS14": 82.08, "STS15": 89.21, "STS16": 84.74, "STS17 (en-en)": 90.73, "STS22 (en)": 62.1, "STSBenchmark": 89.86 } ] }, "Summarization": { "spearman": [ { "Model": "voyage-lite-01-instruct", "SummEval": 30.97 } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "voyage-lite-01-instruct" } ] } }, "silver-retriever-base-v1": { "BitextMining": { "f1": [ { "Model": "silver-retriever-base-v1" } ] }, "Classification": { "accuracy": [ { "Model": "silver-retriever-base-v1", "AllegroReviews": 33.35, "CBD": 68.51, "MassiveIntentClassification (pl)": 66.63, "MassiveScenarioClassification (pl)": 69.97, "PAC": 66.26, "PolEmo2.0-IN": 63.52, "PolEmo2.0-OUT": 44.7 } ] }, "Clustering": { "v_measure": [ { "Model": "silver-retriever-base-v1", "8TagsClustering": 31.49 } ] }, "PairClassification": { "ap": [ { "Model": "silver-retriever-base-v1", "CDSC-E": 67.35, "PPC": 85.33, "PSC": 98.46, "SICK-E-PL": 58.19 } ] }, "Reranking": { "map": [ { "Model": "silver-retriever-base-v1" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "silver-retriever-base-v1", "ArguAna-PL": 44.12, "DBPedia-PL": 26.32, "FiQA-PL": 24.95, "HotpotQA-PL": 45.13, "MSMARCO-PL": 25.47, "NFCorpus-PL": 28.55, "NQ-PL": 37.9, "Quora-PL": 77.98, "SCIDOCS-PL": 10.9, "SciFact-PL": 54.44, "TRECCOVID-PL": 46.98 } ] }, "STS": { "spearman": [ { "Model": "silver-retriever-base-v1", "CDSC-R": 89.09, "SICK-R-PL": 67.26, "STS22 (pl)": 38.69 } ] }, "Summarization": { "spearman": [ { "Model": "silver-retriever-base-v1" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "silver-retriever-base-v1" } ] } }, "electra-small-nordic": { "BitextMining": { "f1": [ { "Model": "electra-small-nordic", "BornholmBitextMining": 1.44 } ] }, "Classification": { "accuracy": [ { "Model": "electra-small-nordic", "AngryTweetsClassification": 47.91, "DKHateClassification": 59.45, "DanishPoliticalCommentsClassification": 
31.89, "LccSentimentClassification": 47.93, "MassiveIntentClassification (da)": 26.3, "MassiveIntentClassification (nb)": 24.6, "MassiveIntentClassification (sv)": 27.58, "MassiveScenarioClassification (da)": 28.93, "MassiveScenarioClassification (nb)": 27.3, "MassiveScenarioClassification (sv)": 29.93, "NoRecClassification": 45.44, "NordicLangClassification": 57.82, "NorwegianParliament": 53.25, "ScalaDaClassification": 70.41, "ScalaNbClassification": 75.28 } ] }, "Clustering": { "v_measure": [ { "Model": "electra-small-nordic" } ] }, "PairClassification": { "ap": [ { "Model": "electra-small-nordic" } ] }, "Reranking": { "map": [ { "Model": "electra-small-nordic" } ] }, "Retrieval": { "ndcg_at_10": [ { "Model": "electra-small-nordic" } ] }, "STS": { "spearman": [ { "Model": "electra-small-nordic" } ] }, "Summarization": { "spearman": [ { "Model": "electra-small-nordic" } ] }, "InstructionRetrieval": { "p-MRR": [ { "Model": "electra-small-nordic" } ] } } }