Datasets: mteb

Muennighoff and KennethEnevoldsen committed
Commit 7a67ef5
1 parent: 11a9906

Added results on MTEB(Multilingual) and HardNegative retrieval tasks


* Add

* Add res

* fix: reduce sizes of files and added makefile command

---------

Co-authored-by: Kenneth Enevoldsen <[email protected]>

This view is limited to 50 files because it contains too many changes. See the raw diff for the complete change set.
Files changed (50):
  1. makefile +5 -1
  2. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/AILAStatutes.json +158 -0
  3. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/AfriSentiClassification.json +755 -0
  4. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/AlloProfClusteringS2S.v2.json +34 -0
  5. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/AlloprofReranking.json +26 -0
  6. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/AmazonCounterfactualClassification.json +685 -0
  7. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/ArXivHierarchicalClusteringP2P.json +46 -0
  8. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/ArXivHierarchicalClusteringS2S.json +46 -0
  9. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/ArguAna.json +158 -0
  10. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/ArmenianParaphrasePC.json +58 -0
  11. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/BUCC.v2.json +59 -0
  12. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/BelebeleRetrieval.json +0 -0
  13. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/BibleNLPBitextMining.json +0 -0
  14. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/BigPatentClustering.v2.json +34 -0
  15. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/BiorxivClusteringP2P.v2.json +34 -0
  16. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/BornholmBitextMining.json +22 -0
  17. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/BrazilianToxicTweetsClassification.json +73 -0
  18. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/BulgarianStoreReviewSentimentClassfication.json +73 -0
  19. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/CEDRClassification.json +73 -0
  20. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/CLSClusteringP2P.v2.json +34 -0
  21. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/CSFDSKMovieReviewSentimentClassification.json +73 -0
  22. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/CTKFactsNLI.json +107 -0
  23. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/CataloniaTweetClassification.json +261 -0
  24. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/ClimateFEVERHardNegatives.json +158 -0
  25. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/Core17InstructionRetrieval.json +137 -0
  26. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/CovidRetrieval.json +158 -0
  27. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/CyrillicTurkicLangClassification.json +81 -0
  28. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/CzechProductReviewSentimentClassification.json +73 -0
  29. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/DBPedia-PLHardNegatives.json +158 -0
  30. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/DBPediaHardNegatives.json +158 -0
  31. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/DBpediaClassification.json +73 -0
  32. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/DalajClassification.json +95 -0
  33. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/DiaBlaBitextMining.json +35 -0
  34. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/EstonianValenceClassification.json +73 -0
  35. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/FEVERHardNegatives.json +158 -0
  36. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/FaroeseSTS.json +26 -0
  37. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/FilipinoShopeeReviewsClassification.json +137 -0
  38. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/FinParaSTS.json +43 -0
  39. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/FinancialPhrasebankClassification.json +73 -0
  40. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/FloresBitextMining.json +0 -0
  41. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/GermanSTSBenchmark.json +43 -0
  42. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/GreekLegalCodeClassification.json +137 -0
  43. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/GujaratiNewsClassification.json +73 -0
  44. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/HALClusteringS2S.v2.json +34 -0
  45. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/HagridRetrieval.json +158 -0
  46. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/HotpotQA-PLHardNegatives.json +158 -0
  47. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/HotpotQAHardNegatives.json +158 -0
  48. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/IN22GenBitextMining.json +0 -0
  49. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/IndicCrosslingualSTS.json +203 -0
  50. results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/IndicGenBenchFloresBitextMining.json +1405 -0
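Each of the files listed above follows the layout results/<model_name>/<model_revision>/<TaskName>.json, and every file stores its results under "scores", keyed by split (e.g. "test"), with one entry per hf_subset and a "main_score" field. As a rough sketch of how these results can be consumed in Python (the helper name, the choice of the first split, and the averaging over subsets are illustrative assumptions, not part of this repository):

import json
from pathlib import Path

def collect_main_scores(results_dir: str, model: str, revision: str) -> dict:
    """Collect one main_score per task file for a given model revision (sketch)."""
    scores = {}
    for path in Path(results_dir, model, revision).glob("*.json"):
        data = json.loads(path.read_text())
        split_entries = next(iter(data["scores"].values()))  # first split, e.g. "test"
        # Average over hf_subsets when a task reports several languages.
        scores[data["task_name"]] = sum(e["main_score"] for e in split_entries) / len(split_entries)
    return scores

scores = collect_main_scores(
    "results",
    "Alibaba-NLP__gte-multilingual-base",
    "7fc06782350c1a83f88b15dd4b38ef853d3b8503",
)
for task, score in sorted(scores.items()):
    print(f"{task}: {score:.4f}")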
makefile CHANGED
@@ -5,4 +5,8 @@ install-for-tests:
 
 test:
 	@echo "--- Running tests ---"
-	pytest
+	pytest
+
+pre-push:
+	@echo "--- Running pre-push commands ---"
+	python reduce_large_json_files.py
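The new pre-push target calls reduce_large_json_files.py, which is not included in this diff. A minimal sketch of the kind of size reduction such a pre-push step could perform (the size threshold and the choice to drop scores_per_experiment are assumptions, not the actual behaviour of the script):

import json
from pathlib import Path

SIZE_LIMIT_BYTES = 500_000  # assumed threshold, not the script's real value

def reduce_result_file(path: Path) -> None:
    """Drop bulky per-experiment scores from oversized result files (sketch)."""
    if path.stat().st_size <= SIZE_LIMIT_BYTES:
        return
    data = json.loads(path.read_text())
    for split_entries in data.get("scores", {}).values():
        for entry in split_entries:
            entry.pop("scores_per_experiment", None)
    path.write_text(json.dumps(data, indent=4, sort_keys=True))

for result in Path("results").rglob("*.json"):
    reduce_result_file(result)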
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/AILAStatutes.json ADDED
@@ -0,0 +1,158 @@
+{
+  "dataset_revision": "ebfcd844eadd3d667efa3c57fc5c8c87f5c2867e",
+  "evaluation_time": 2.1449167728424072,
+  "kg_co2_emissions": 0.00018021358730526757,
+  "mteb_version": "1.12.75",
+  "scores": {
+    "test": [
+      {
+        "hf_subset": "default",
+        "languages": [
+          "eng-Latn"
+        ],
+        "main_score": 0.33572,
+        "map_at_1": 0.098,
+        "map_at_10": 0.21049,
+        "map_at_100": 0.27684,
+        "map_at_1000": 0.27684,
+        "map_at_20": 0.23128,
+        "map_at_3": 0.15517,
+        "map_at_5": 0.17602,
+        "mrr_at_1": 0.42,
+        "mrr_at_10": 0.5489920634920634,
+        "mrr_at_100": 0.5579525811353396,
+        "mrr_at_1000": 0.5579525811353396,
+        "mrr_at_20": 0.5555538350538349,
+        "mrr_at_3": 0.5099999999999999,
+        "mrr_at_5": 0.5289999999999999,
+        "nauc_map_at_1000_diff1": 0.12334961747338444,
+        "nauc_map_at_1000_max": 0.30619209224293353,
+        "nauc_map_at_1000_std": -0.030718990518868516,
+        "nauc_map_at_100_diff1": 0.12334961747338444,
+        "nauc_map_at_100_max": 0.30619209224293353,
+        "nauc_map_at_100_std": -0.030718990518868516,
+        "nauc_map_at_10_diff1": 0.12326986123721569,
+        "nauc_map_at_10_max": 0.33448409936254264,
+        "nauc_map_at_10_std": -0.005586989019712461,
+        "nauc_map_at_1_diff1": 0.051085650745113044,
+        "nauc_map_at_1_max": 0.291641432443434,
+        "nauc_map_at_1_std": -0.039822086315935736,
+        "nauc_map_at_20_diff1": 0.14097198525881416,
+        "nauc_map_at_20_max": 0.32616722893992217,
+        "nauc_map_at_20_std": -0.02786122313893844,
+        "nauc_map_at_3_diff1": 0.14474626451994102,
+        "nauc_map_at_3_max": 0.3591146061343692,
+        "nauc_map_at_3_std": -0.000559546471639301,
+        "nauc_map_at_5_diff1": 0.10546517261748972,
+        "nauc_map_at_5_max": 0.35085876307049013,
+        "nauc_map_at_5_std": 0.0020084071107343703,
+        "nauc_mrr_at_1000_diff1": 0.035944630624480334,
+        "nauc_mrr_at_1000_max": 0.2340912362618136,
+        "nauc_mrr_at_1000_std": -0.054160371582013825,
+        "nauc_mrr_at_100_diff1": 0.035944630624480334,
+        "nauc_mrr_at_100_max": 0.2340912362618136,
+        "nauc_mrr_at_100_std": -0.054160371582013825,
+        "nauc_mrr_at_10_diff1": 0.035670988619914745,
+        "nauc_mrr_at_10_max": 0.2328703059621917,
+        "nauc_mrr_at_10_std": -0.04531014796575209,
+        "nauc_mrr_at_1_diff1": 0.016123596961920575,
+        "nauc_mrr_at_1_max": 0.18407341161832186,
+        "nauc_mrr_at_1_std": -0.11385022163465279,
+        "nauc_mrr_at_20_diff1": 0.035045783552563894,
+        "nauc_mrr_at_20_max": 0.23431242727526086,
+        "nauc_mrr_at_20_std": -0.05011543297767148,
+        "nauc_mrr_at_3_diff1": 0.0667203800686233,
+        "nauc_mrr_at_3_max": 0.2589217497603732,
+        "nauc_mrr_at_3_std": -0.07863919874421772,
+        "nauc_mrr_at_5_diff1": 0.016788634150608658,
+        "nauc_mrr_at_5_max": 0.2233374491085115,
+        "nauc_mrr_at_5_std": -0.0389593255291351,
+        "nauc_ndcg_at_1000_diff1": 0.08036072110815415,
+        "nauc_ndcg_at_1000_max": 0.25314596958217483,
+        "nauc_ndcg_at_1000_std": -0.04763471197741127,
+        "nauc_ndcg_at_100_diff1": 0.08036072110815415,
+        "nauc_ndcg_at_100_max": 0.25314596958217483,
+        "nauc_ndcg_at_100_std": -0.04763471197741127,
+        "nauc_ndcg_at_10_diff1": 0.10691068836439659,
+        "nauc_ndcg_at_10_max": 0.30903016453390264,
+        "nauc_ndcg_at_10_std": 0.005505461793332526,
+        "nauc_ndcg_at_1_diff1": 0.016123596961920575,
+        "nauc_ndcg_at_1_max": 0.18407341161832186,
+        "nauc_ndcg_at_1_std": -0.11385022163465279,
+        "nauc_ndcg_at_20_diff1": 0.13861577805806724,
+        "nauc_ndcg_at_20_max": 0.3057236603316381,
+        "nauc_ndcg_at_20_std": -0.023205629408024175,
+        "nauc_ndcg_at_3_diff1": 0.11908115635412221,
+        "nauc_ndcg_at_3_max": 0.2912550866483106,
+        "nauc_ndcg_at_3_std": -0.03811034731037659,
+        "nauc_ndcg_at_5_diff1": 0.08104210450975674,
+        "nauc_ndcg_at_5_max": 0.31527897011513895,
+        "nauc_ndcg_at_5_std": 0.006544003750270816,
+        "nauc_precision_at_1000_diff1": -0.35315855911893174,
+        "nauc_precision_at_1000_max": -0.5976107279413103,
+        "nauc_precision_at_1000_std": -0.38996725270302257,
+        "nauc_precision_at_100_diff1": -0.3531585591189289,
+        "nauc_precision_at_100_max": -0.5976107279413052,
+        "nauc_precision_at_100_std": -0.3899672527030205,
+        "nauc_precision_at_10_diff1": 0.05138605745990966,
+        "nauc_precision_at_10_max": 0.1751749647968724,
+        "nauc_precision_at_10_std": -0.006504697253105322,
+        "nauc_precision_at_1_diff1": 0.016123596961920575,
+        "nauc_precision_at_1_max": 0.18407341161832186,
+        "nauc_precision_at_1_std": -0.11385022163465279,
+        "nauc_precision_at_20_diff1": 0.09554317272068041,
+        "nauc_precision_at_20_max": 0.11491204545228778,
+        "nauc_precision_at_20_std": -0.09871172164446725,
+        "nauc_precision_at_3_diff1": 0.14812347324006228,
+        "nauc_precision_at_3_max": 0.291549223594527,
+        "nauc_precision_at_3_std": -0.027742188968038237,
+        "nauc_precision_at_5_diff1": 0.04836676217765031,
+        "nauc_precision_at_5_max": 0.23323782234957008,
+        "nauc_precision_at_5_std": 0.009212034383954164,
+        "nauc_recall_at_1000_diff1": NaN,
+        "nauc_recall_at_1000_max": NaN,
+        "nauc_recall_at_1000_std": NaN,
+        "nauc_recall_at_100_diff1": NaN,
+        "nauc_recall_at_100_max": NaN,
+        "nauc_recall_at_100_std": NaN,
+        "nauc_recall_at_10_diff1": 0.13946061260668566,
+        "nauc_recall_at_10_max": 0.3110333408713887,
+        "nauc_recall_at_10_std": 0.055293217173481755,
+        "nauc_recall_at_1_diff1": 0.051085650745113044,
+        "nauc_recall_at_1_max": 0.291641432443434,
+        "nauc_recall_at_1_std": -0.039822086315935736,
+        "nauc_recall_at_20_diff1": 0.22936946064692115,
+        "nauc_recall_at_20_max": 0.3165148473600486,
+        "nauc_recall_at_20_std": 0.012884914021665129,
+        "nauc_recall_at_3_diff1": 0.16820451669101075,
+        "nauc_recall_at_3_max": 0.38184856739462425,
+        "nauc_recall_at_3_std": 0.018151654250827248,
+        "nauc_recall_at_5_diff1": 0.07711443028168376,
+        "nauc_recall_at_5_max": 0.3253949638880427,
+        "nauc_recall_at_5_std": 0.05426185329620741,
+        "ndcg_at_1": 0.42,
+        "ndcg_at_10": 0.33572,
+        "ndcg_at_100": 0.54256,
+        "ndcg_at_1000": 0.54256,
+        "ndcg_at_20": 0.38576,
+        "ndcg_at_3": 0.30224,
+        "ndcg_at_5": 0.27254,
+        "precision_at_1": 0.42,
+        "precision_at_10": 0.154,
+        "precision_at_100": 0.0434,
+        "precision_at_1000": 0.00434,
+        "precision_at_20": 0.104,
+        "precision_at_3": 0.26,
+        "precision_at_5": 0.2,
+        "recall_at_1": 0.098,
+        "recall_at_10": 0.36067,
+        "recall_at_100": 1.0,
+        "recall_at_1000": 1.0,
+        "recall_at_20": 0.481,
+        "recall_at_3": 0.18667,
+        "recall_at_5": 0.23767
+      }
+    ]
+  },
+  "task_name": "AILAStatutes"
+}
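Note that the nauc_recall_at_100 and nauc_recall_at_1000 fields above are stored as bare NaN literals. Python's json module accepts these by default, but they are not valid JSON under the strict grammar, so parsers in other languages may reject such files. A small Python check illustrating both behaviours (the reject_constant helper is only for illustration):

import json
import math

raw = '{"nauc_recall_at_1000_diff1": NaN}'

# Default (lenient) parsing maps the bare NaN literal to float("nan").
value = json.loads(raw)["nauc_recall_at_1000_diff1"]
assert math.isnan(value)

# Strict parsing, as many non-Python JSON parsers apply, rejects it.
def reject_constant(name: str) -> float:
    raise ValueError(f"non-standard JSON constant: {name}")

try:
    json.loads(raw, parse_constant=reject_constant)
except ValueError as err:
    print(err)  # non-standard JSON constant: NaN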
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/AfriSentiClassification.json ADDED
@@ -0,0 +1,755 @@
1
+ {
2
+ "dataset_revision": "b52e930385cf5ed7f063072c3f7bd17b599a16cf",
3
+ "evaluation_time": 27.060703992843628,
4
+ "kg_co2_emissions": 0.0011233188933602552,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.4117558779389695,
10
+ "f1": 0.3538818770432014,
11
+ "f1_weighted": 0.4411069966238198,
12
+ "hf_subset": "amh",
13
+ "languages": [
14
+ "amh-Ethi"
15
+ ],
16
+ "main_score": 0.4117558779389695,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.5362681340670336,
20
+ "f1": 0.409099779375802,
21
+ "f1_weighted": 0.5657015736188191
22
+ },
23
+ {
24
+ "accuracy": 0.5637818909454727,
25
+ "f1": 0.4367136437567886,
26
+ "f1_weighted": 0.5721522305977169
27
+ },
28
+ {
29
+ "accuracy": 0.3646823411705853,
30
+ "f1": 0.33761906257798263,
31
+ "f1_weighted": 0.37219605661314303
32
+ },
33
+ {
34
+ "accuracy": 0.3736868434217109,
35
+ "f1": 0.3507535239404194,
36
+ "f1_weighted": 0.4085696956215641
37
+ },
38
+ {
39
+ "accuracy": 0.49724862431215605,
40
+ "f1": 0.38035975018940493,
41
+ "f1_weighted": 0.5256811665609029
42
+ },
43
+ {
44
+ "accuracy": 0.35317658829414705,
45
+ "f1": 0.3009877490859866,
46
+ "f1_weighted": 0.4134573487004805
47
+ },
48
+ {
49
+ "accuracy": 0.3441720860430215,
50
+ "f1": 0.3338509707126018,
51
+ "f1_weighted": 0.3701432273702633
52
+ },
53
+ {
54
+ "accuracy": 0.33466733366683343,
55
+ "f1": 0.3195396274926619,
56
+ "f1_weighted": 0.37828355986146445
57
+ },
58
+ {
59
+ "accuracy": 0.39769884942471234,
60
+ "f1": 0.3465704538426834,
61
+ "f1_weighted": 0.4146391473638792
62
+ },
63
+ {
64
+ "accuracy": 0.352176088044022,
65
+ "f1": 0.3233242094576823,
66
+ "f1_weighted": 0.3902459599299637
67
+ }
68
+ ]
69
+ },
70
+ {
71
+ "accuracy": 0.4461377870563673,
72
+ "f1": 0.4193361813460091,
73
+ "f1_weighted": 0.4607328061088453,
74
+ "hf_subset": "arq",
75
+ "languages": [
76
+ "arq-Arab"
77
+ ],
78
+ "main_score": 0.4461377870563673,
79
+ "scores_per_experiment": [
80
+ {
81
+ "accuracy": 0.4311064718162839,
82
+ "f1": 0.42343437730444683,
83
+ "f1_weighted": 0.4628142951505578
84
+ },
85
+ {
86
+ "accuracy": 0.44780793319415446,
87
+ "f1": 0.4060033319373996,
88
+ "f1_weighted": 0.4677379693338302
89
+ },
90
+ {
91
+ "accuracy": 0.55741127348643,
92
+ "f1": 0.5030494081410873,
93
+ "f1_weighted": 0.556381628523794
94
+ },
95
+ {
96
+ "accuracy": 0.42066805845511485,
97
+ "f1": 0.4031631647511242,
98
+ "f1_weighted": 0.4415988605815632
99
+ },
100
+ {
101
+ "accuracy": 0.44467640918580376,
102
+ "f1": 0.43008598309031076,
103
+ "f1_weighted": 0.46541691839535876
104
+ },
105
+ {
106
+ "accuracy": 0.36325678496868474,
107
+ "f1": 0.34191236838666444,
108
+ "f1_weighted": 0.37008396875060856
109
+ },
110
+ {
111
+ "accuracy": 0.4342379958246347,
112
+ "f1": 0.400147458473229,
113
+ "f1_weighted": 0.45213457052696115
114
+ },
115
+ {
116
+ "accuracy": 0.453027139874739,
117
+ "f1": 0.42283188308481257,
118
+ "f1_weighted": 0.4571350418734149
119
+ },
120
+ {
121
+ "accuracy": 0.48434237995824636,
122
+ "f1": 0.44691079207927736,
123
+ "f1_weighted": 0.48520472309931933
124
+ },
125
+ {
126
+ "accuracy": 0.42484342379958245,
127
+ "f1": 0.41582304621173877,
128
+ "f1_weighted": 0.4488200848530452
129
+ }
130
+ ]
131
+ },
132
+ {
133
+ "accuracy": 0.43095703125,
134
+ "f1": 0.4139935825907789,
135
+ "f1_weighted": 0.4193284318657633,
136
+ "hf_subset": "ary",
137
+ "languages": [
138
+ "ary-Arab"
139
+ ],
140
+ "main_score": 0.43095703125,
141
+ "scores_per_experiment": [
142
+ {
143
+ "accuracy": 0.47119140625,
144
+ "f1": 0.4661666033482977,
145
+ "f1_weighted": 0.4658614710687616
146
+ },
147
+ {
148
+ "accuracy": 0.46240234375,
149
+ "f1": 0.4482738081601915,
150
+ "f1_weighted": 0.4570301624865782
151
+ },
152
+ {
153
+ "accuracy": 0.30224609375,
154
+ "f1": 0.27112514903201773,
155
+ "f1_weighted": 0.263407997498876
156
+ },
157
+ {
158
+ "accuracy": 0.4248046875,
159
+ "f1": 0.4062600045364475,
160
+ "f1_weighted": 0.41363751935533055
161
+ },
162
+ {
163
+ "accuracy": 0.50390625,
164
+ "f1": 0.4928000092100741,
165
+ "f1_weighted": 0.5022472392053605
166
+ },
167
+ {
168
+ "accuracy": 0.37890625,
169
+ "f1": 0.3700613488786734,
170
+ "f1_weighted": 0.376209286266397
171
+ },
172
+ {
173
+ "accuracy": 0.43798828125,
174
+ "f1": 0.39789840729666737,
175
+ "f1_weighted": 0.412228632347978
176
+ },
177
+ {
178
+ "accuracy": 0.4462890625,
179
+ "f1": 0.4352119932252138,
180
+ "f1_weighted": 0.44126627405618324
181
+ },
182
+ {
183
+ "accuracy": 0.44775390625,
184
+ "f1": 0.4462646562142374,
185
+ "f1_weighted": 0.4425446074391396
186
+ },
187
+ {
188
+ "accuracy": 0.43408203125,
189
+ "f1": 0.40587384600596815,
190
+ "f1_weighted": 0.4188511289330279
191
+ }
192
+ ]
193
+ },
194
+ {
195
+ "accuracy": 0.591259765625,
196
+ "f1": 0.3274045740388073,
197
+ "f1_weighted": 0.6797075643348555,
198
+ "hf_subset": "hau",
199
+ "languages": [
200
+ "hau-Latn"
201
+ ],
202
+ "main_score": 0.591259765625,
203
+ "scores_per_experiment": [
204
+ {
205
+ "accuracy": 0.65185546875,
206
+ "f1": 0.28481940472407585,
207
+ "f1_weighted": 0.7274548335964808
208
+ },
209
+ {
210
+ "accuracy": 0.3349609375,
211
+ "f1": 0.25484453623343567,
212
+ "f1_weighted": 0.4840623969991029
213
+ },
214
+ {
215
+ "accuracy": 0.40380859375,
216
+ "f1": 0.21826267960089787,
217
+ "f1_weighted": 0.5473816520914759
218
+ },
219
+ {
220
+ "accuracy": 0.701171875,
221
+ "f1": 0.425924187188597,
222
+ "f1_weighted": 0.7676727390256695
223
+ },
224
+ {
225
+ "accuracy": 0.80322265625,
226
+ "f1": 0.36697161578151744,
227
+ "f1_weighted": 0.8044586659394272
228
+ },
229
+ {
230
+ "accuracy": 0.36328125,
231
+ "f1": 0.2257465240578054,
232
+ "f1_weighted": 0.5127407488384619
233
+ },
234
+ {
235
+ "accuracy": 0.654296875,
236
+ "f1": 0.37968088267408434,
237
+ "f1_weighted": 0.748575017527803
238
+ },
239
+ {
240
+ "accuracy": 0.46826171875,
241
+ "f1": 0.24172430568775513,
242
+ "f1_weighted": 0.5983225359318484
243
+ },
244
+ {
245
+ "accuracy": 0.73046875,
246
+ "f1": 0.43522563402081477,
247
+ "f1_weighted": 0.780486811080867
248
+ },
249
+ {
250
+ "accuracy": 0.80126953125,
251
+ "f1": 0.44084597041908885,
252
+ "f1_weighted": 0.8259202423174181
253
+ }
254
+ ]
255
+ },
256
+ {
257
+ "accuracy": 0.50693359375,
258
+ "f1": 0.3008944695972054,
259
+ "f1_weighted": 0.4405200650258562,
260
+ "hf_subset": "ibo",
261
+ "languages": [
262
+ "ibo-Latn"
263
+ ],
264
+ "main_score": 0.50693359375,
265
+ "scores_per_experiment": [
266
+ {
267
+ "accuracy": 0.5302734375,
268
+ "f1": 0.3258665722410638,
269
+ "f1_weighted": 0.4750645886463196
270
+ },
271
+ {
272
+ "accuracy": 0.60693359375,
273
+ "f1": 0.3997163171718265,
274
+ "f1_weighted": 0.5931855549395071
275
+ },
276
+ {
277
+ "accuracy": 0.462890625,
278
+ "f1": 0.2882633741961083,
279
+ "f1_weighted": 0.4555791468498878
280
+ },
281
+ {
282
+ "accuracy": 0.50244140625,
283
+ "f1": 0.36072783122978286,
284
+ "f1_weighted": 0.5375140201449834
285
+ },
286
+ {
287
+ "accuracy": 0.4560546875,
288
+ "f1": 0.22764378945300612,
289
+ "f1_weighted": 0.31582285460135995
290
+ },
291
+ {
292
+ "accuracy": 0.453125,
293
+ "f1": 0.20802510647836805,
294
+ "f1_weighted": 0.2833935776731674
295
+ },
296
+ {
297
+ "accuracy": 0.49560546875,
298
+ "f1": 0.27164396468564095,
299
+ "f1_weighted": 0.3864332636197854
300
+ },
301
+ {
302
+ "accuracy": 0.49755859375,
303
+ "f1": 0.29875099048563725,
304
+ "f1_weighted": 0.4310219333335471
305
+ },
306
+ {
307
+ "accuracy": 0.59765625,
308
+ "f1": 0.38610532092077526,
309
+ "f1_weighted": 0.5876587286435371
310
+ },
311
+ {
312
+ "accuracy": 0.466796875,
313
+ "f1": 0.24220142910984455,
314
+ "f1_weighted": 0.3395269818064673
315
+ }
316
+ ]
317
+ },
318
+ {
319
+ "accuracy": 0.43196881091617934,
320
+ "f1": 0.42603140142236756,
321
+ "f1_weighted": 0.4232062000392361,
322
+ "hf_subset": "kin",
323
+ "languages": [
324
+ "kin-Latn"
325
+ ],
326
+ "main_score": 0.43196881091617934,
327
+ "scores_per_experiment": [
328
+ {
329
+ "accuracy": 0.4844054580896686,
330
+ "f1": 0.4874164457497791,
331
+ "f1_weighted": 0.4810725713503491
332
+ },
333
+ {
334
+ "accuracy": 0.4152046783625731,
335
+ "f1": 0.40457369689583017,
336
+ "f1_weighted": 0.40151336707516017
337
+ },
338
+ {
339
+ "accuracy": 0.4044834307992203,
340
+ "f1": 0.3800564192612245,
341
+ "f1_weighted": 0.3777964150003722
342
+ },
343
+ {
344
+ "accuracy": 0.46296296296296297,
345
+ "f1": 0.45680714967735075,
346
+ "f1_weighted": 0.46275611660967186
347
+ },
348
+ {
349
+ "accuracy": 0.4064327485380117,
350
+ "f1": 0.4073042315390474,
351
+ "f1_weighted": 0.40652127267173366
352
+ },
353
+ {
354
+ "accuracy": 0.46101364522417154,
355
+ "f1": 0.44961968302494065,
356
+ "f1_weighted": 0.4486085256910102
357
+ },
358
+ {
359
+ "accuracy": 0.42007797270955166,
360
+ "f1": 0.4197710338199097,
361
+ "f1_weighted": 0.4146224985860841
362
+ },
363
+ {
364
+ "accuracy": 0.4220272904483431,
365
+ "f1": 0.41770018679851145,
366
+ "f1_weighted": 0.4132634579844758
367
+ },
368
+ {
369
+ "accuracy": 0.4249512670565302,
370
+ "f1": 0.4197330879807777,
371
+ "f1_weighted": 0.4150464380610991
372
+ },
373
+ {
374
+ "accuracy": 0.41812865497076024,
375
+ "f1": 0.417332079476304,
376
+ "f1_weighted": 0.4108613373624048
377
+ }
378
+ ]
379
+ },
380
+ {
381
+ "accuracy": 0.398193359375,
382
+ "f1": 0.3893662096768625,
383
+ "f1_weighted": 0.38917382804241696,
384
+ "hf_subset": "por",
385
+ "languages": [
386
+ "por-Latn"
387
+ ],
388
+ "main_score": 0.398193359375,
389
+ "scores_per_experiment": [
390
+ {
391
+ "accuracy": 0.3603515625,
392
+ "f1": 0.37422753197455166,
393
+ "f1_weighted": 0.34526239835214917
394
+ },
395
+ {
396
+ "accuracy": 0.431640625,
397
+ "f1": 0.423270809662503,
398
+ "f1_weighted": 0.4377996584102659
399
+ },
400
+ {
401
+ "accuracy": 0.5673828125,
402
+ "f1": 0.4795362532863927,
403
+ "f1_weighted": 0.5691239280629279
404
+ },
405
+ {
406
+ "accuracy": 0.36669921875,
407
+ "f1": 0.38003315965216783,
408
+ "f1_weighted": 0.37802603556633824
409
+ },
410
+ {
411
+ "accuracy": 0.2978515625,
412
+ "f1": 0.3153269957212836,
413
+ "f1_weighted": 0.26633712444334384
414
+ },
415
+ {
416
+ "accuracy": 0.4345703125,
417
+ "f1": 0.43400465994524895,
418
+ "f1_weighted": 0.43447698391183437
419
+ },
420
+ {
421
+ "accuracy": 0.2880859375,
422
+ "f1": 0.2946942580607699,
423
+ "f1_weighted": 0.22277871681677786
424
+ },
425
+ {
426
+ "accuracy": 0.37646484375,
427
+ "f1": 0.37076675127732966,
428
+ "f1_weighted": 0.3744119753574705
429
+ },
430
+ {
431
+ "accuracy": 0.34765625,
432
+ "f1": 0.35443809787812636,
433
+ "f1_weighted": 0.3324691404264444
434
+ },
435
+ {
436
+ "accuracy": 0.51123046875,
437
+ "f1": 0.4673635793102511,
438
+ "f1_weighted": 0.5310523190766174
439
+ }
440
+ ]
441
+ },
442
+ {
443
+ "accuracy": 0.407568359375,
444
+ "f1": 0.3308138045129986,
445
+ "f1_weighted": 0.4494618963056286,
446
+ "hf_subset": "pcm",
447
+ "languages": [
448
+ "pcm-Latn"
449
+ ],
450
+ "main_score": 0.407568359375,
451
+ "scores_per_experiment": [
452
+ {
453
+ "accuracy": 0.42333984375,
454
+ "f1": 0.3391245767845159,
455
+ "f1_weighted": 0.47379994061343056
456
+ },
457
+ {
458
+ "accuracy": 0.4873046875,
459
+ "f1": 0.40925979499277204,
460
+ "f1_weighted": 0.5274903472346696
461
+ },
462
+ {
463
+ "accuracy": 0.41552734375,
464
+ "f1": 0.3322250406469815,
465
+ "f1_weighted": 0.4523716596698209
466
+ },
467
+ {
468
+ "accuracy": 0.47412109375,
469
+ "f1": 0.3462137515084433,
470
+ "f1_weighted": 0.5124209674414094
471
+ },
472
+ {
473
+ "accuracy": 0.3896484375,
474
+ "f1": 0.33353910324783453,
475
+ "f1_weighted": 0.438822886824163
476
+ },
477
+ {
478
+ "accuracy": 0.29248046875,
479
+ "f1": 0.26676174100336186,
480
+ "f1_weighted": 0.3347625158070594
481
+ },
482
+ {
483
+ "accuracy": 0.3818359375,
484
+ "f1": 0.3004758400227256,
485
+ "f1_weighted": 0.43527117428904954
486
+ },
487
+ {
488
+ "accuracy": 0.38623046875,
489
+ "f1": 0.3163940240600576,
490
+ "f1_weighted": 0.42718091140987813
491
+ },
492
+ {
493
+ "accuracy": 0.466796875,
494
+ "f1": 0.35027760958211934,
495
+ "f1_weighted": 0.4986129442583248
496
+ },
497
+ {
498
+ "accuracy": 0.3583984375,
499
+ "f1": 0.3138665632811735,
500
+ "f1_weighted": 0.3938856155084812
501
+ }
502
+ ]
503
+ },
504
+ {
505
+ "accuracy": 0.3991978609625669,
506
+ "f1": 0.3731613991213806,
507
+ "f1_weighted": 0.41689602353583943,
508
+ "hf_subset": "swa",
509
+ "languages": [
510
+ "swa-Latn"
511
+ ],
512
+ "main_score": 0.3991978609625669,
513
+ "scores_per_experiment": [
514
+ {
515
+ "accuracy": 0.3663101604278075,
516
+ "f1": 0.35738364414835005,
517
+ "f1_weighted": 0.37222796089578103
518
+ },
519
+ {
520
+ "accuracy": 0.35561497326203206,
521
+ "f1": 0.3252672315063638,
522
+ "f1_weighted": 0.39314089088498394
523
+ },
524
+ {
525
+ "accuracy": 0.4117647058823529,
526
+ "f1": 0.4006465588666841,
527
+ "f1_weighted": 0.4239615036290332
528
+ },
529
+ {
530
+ "accuracy": 0.3409090909090909,
531
+ "f1": 0.3196743225206593,
532
+ "f1_weighted": 0.34314160934030163
533
+ },
534
+ {
535
+ "accuracy": 0.4117647058823529,
536
+ "f1": 0.3847681060230476,
537
+ "f1_weighted": 0.43680325200257525
538
+ },
539
+ {
540
+ "accuracy": 0.45588235294117646,
541
+ "f1": 0.42411423782058205,
542
+ "f1_weighted": 0.47242378850214467
543
+ },
544
+ {
545
+ "accuracy": 0.40106951871657753,
546
+ "f1": 0.3673324568706841,
547
+ "f1_weighted": 0.4324078204244133
548
+ },
549
+ {
550
+ "accuracy": 0.3890374331550802,
551
+ "f1": 0.37854852587206445,
552
+ "f1_weighted": 0.39124692243748416
553
+ },
554
+ {
555
+ "accuracy": 0.47593582887700536,
556
+ "f1": 0.41828589673417255,
557
+ "f1_weighted": 0.4916683496805201
558
+ },
559
+ {
560
+ "accuracy": 0.3836898395721925,
561
+ "f1": 0.3555930108511973,
562
+ "f1_weighted": 0.41193813756115716
563
+ }
564
+ ]
565
+ },
566
+ {
567
+ "accuracy": 0.3596417281348788,
568
+ "f1": 0.33725801842654796,
569
+ "f1_weighted": 0.368201514000201,
570
+ "hf_subset": "twi",
571
+ "languages": [
572
+ "twi-Latn"
573
+ ],
574
+ "main_score": 0.3596417281348788,
575
+ "scores_per_experiment": [
576
+ {
577
+ "accuracy": 0.3761854583772392,
578
+ "f1": 0.3345998159590671,
579
+ "f1_weighted": 0.37682980132985744
580
+ },
581
+ {
582
+ "accuracy": 0.39199157007376184,
583
+ "f1": 0.36573652861670863,
584
+ "f1_weighted": 0.39825494332528194
585
+ },
586
+ {
587
+ "accuracy": 0.3361433087460485,
588
+ "f1": 0.30647155151126076,
589
+ "f1_weighted": 0.3226898650967259
590
+ },
591
+ {
592
+ "accuracy": 0.38250790305584825,
593
+ "f1": 0.3658635631472027,
594
+ "f1_weighted": 0.3901740739825649
595
+ },
596
+ {
597
+ "accuracy": 0.34562697576396206,
598
+ "f1": 0.3241773636012798,
599
+ "f1_weighted": 0.360450729316261
600
+ },
601
+ {
602
+ "accuracy": 0.3329820864067439,
603
+ "f1": 0.3202128319185944,
604
+ "f1_weighted": 0.3437529942272569
605
+ },
606
+ {
607
+ "accuracy": 0.39304531085353,
608
+ "f1": 0.3598989678064107,
609
+ "f1_weighted": 0.3987829408055717
610
+ },
611
+ {
612
+ "accuracy": 0.3266596417281349,
613
+ "f1": 0.3191278176221682,
614
+ "f1_weighted": 0.3374973469072997
615
+ },
616
+ {
617
+ "accuracy": 0.36143308746048475,
618
+ "f1": 0.33840272328894216,
619
+ "f1_weighted": 0.38509432442412606
620
+ },
621
+ {
622
+ "accuracy": 0.3498419388830348,
623
+ "f1": 0.3380890207938451,
624
+ "f1_weighted": 0.36848812058706437
625
+ }
626
+ ]
627
+ },
628
+ {
629
+ "accuracy": 0.3547244094488189,
630
+ "f1": 0.3325904036535291,
631
+ "f1_weighted": 0.36663341162776336,
632
+ "hf_subset": "tso",
633
+ "languages": [
634
+ "tso-Latn"
635
+ ],
636
+ "main_score": 0.3547244094488189,
637
+ "scores_per_experiment": [
638
+ {
639
+ "accuracy": 0.29133858267716534,
640
+ "f1": 0.2808087027914614,
641
+ "f1_weighted": 0.2814851537954307
642
+ },
643
+ {
644
+ "accuracy": 0.31496062992125984,
645
+ "f1": 0.31382712793719336,
646
+ "f1_weighted": 0.3425282179376186
647
+ },
648
+ {
649
+ "accuracy": 0.33858267716535434,
650
+ "f1": 0.33241522606871,
651
+ "f1_weighted": 0.36285971091604313
652
+ },
653
+ {
654
+ "accuracy": 0.4094488188976378,
655
+ "f1": 0.3655936982807039,
656
+ "f1_weighted": 0.42298942968715125
657
+ },
658
+ {
659
+ "accuracy": 0.38188976377952755,
660
+ "f1": 0.351419878296146,
661
+ "f1_weighted": 0.3878851160339238
662
+ },
663
+ {
664
+ "accuracy": 0.33070866141732286,
665
+ "f1": 0.3260088193128002,
666
+ "f1_weighted": 0.3511188669752366
667
+ },
668
+ {
669
+ "accuracy": 0.421259842519685,
670
+ "f1": 0.3598633788424274,
671
+ "f1_weighted": 0.42189060265076783
672
+ },
673
+ {
674
+ "accuracy": 0.3464566929133858,
675
+ "f1": 0.3058872069934759,
676
+ "f1_weighted": 0.35104112076245503
677
+ },
678
+ {
679
+ "accuracy": 0.33858267716535434,
680
+ "f1": 0.33152422357346906,
681
+ "f1_weighted": 0.34577312852025327
682
+ },
683
+ {
684
+ "accuracy": 0.37401574803149606,
685
+ "f1": 0.3585557744389038,
686
+ "f1_weighted": 0.39876276899875296
687
+ }
688
+ ]
689
+ },
690
+ {
691
+ "accuracy": 0.232470703125,
692
+ "f1": 0.15535925149821384,
693
+ "f1_weighted": 0.3053415433390762,
694
+ "hf_subset": "yor",
695
+ "languages": [
696
+ "yor-Latn"
697
+ ],
698
+ "main_score": 0.232470703125,
699
+ "scores_per_experiment": [
700
+ {
701
+ "accuracy": 0.32421875,
702
+ "f1": 0.22085156983831963,
703
+ "f1_weighted": 0.46939374725984023
704
+ },
705
+ {
706
+ "accuracy": 0.12939453125,
707
+ "f1": 0.09301734518288897,
708
+ "f1_weighted": 0.1479560605344487
709
+ },
710
+ {
711
+ "accuracy": 0.27001953125,
712
+ "f1": 0.17513507200978218,
713
+ "f1_weighted": 0.3571933996059899
714
+ },
715
+ {
716
+ "accuracy": 0.40185546875,
717
+ "f1": 0.24734956300257274,
718
+ "f1_weighted": 0.5471696805586251
719
+ },
720
+ {
721
+ "accuracy": 0.1416015625,
722
+ "f1": 0.13356874614651645,
723
+ "f1_weighted": 0.23742127727215404
724
+ },
725
+ {
726
+ "accuracy": 0.47021484375,
727
+ "f1": 0.2723735945324754,
728
+ "f1_weighted": 0.6130048087040855
729
+ },
730
+ {
731
+ "accuracy": 0.0849609375,
732
+ "f1": 0.07314727929481582,
733
+ "f1_weighted": 0.0771046647938684
734
+ },
735
+ {
736
+ "accuracy": 0.1259765625,
737
+ "f1": 0.08802726746371532,
738
+ "f1_weighted": 0.12828825186540846
739
+ },
740
+ {
741
+ "accuracy": 0.30419921875,
742
+ "f1": 0.19275575917526402,
743
+ "f1_weighted": 0.4371866368734791
744
+ },
745
+ {
746
+ "accuracy": 0.072265625,
747
+ "f1": 0.05736631833578801,
748
+ "f1_weighted": 0.03869690592286305
749
+ }
750
+ ]
751
+ }
752
+ ]
753
+ },
754
+ "task_name": "AfriSentiClassification"
755
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/AlloProfClusteringS2S.v2.json ADDED
@@ -0,0 +1,34 @@
+{
+  "dataset_revision": "392ba3f5bcc8c51f578786c1fc3dae648662cb9b",
+  "evaluation_time": 45.39525628089905,
+  "kg_co2_emissions": 0.001302303148780544,
+  "mteb_version": "1.12.75",
+  "scores": {
+    "test": [
+      {
+        "hf_subset": "default",
+        "languages": [
+          "fra-Latn"
+        ],
+        "main_score": 0.4422861762265864,
+        "v_measure": 0.4422861762265864,
+        "v_measure_std": 0.028265493562154468,
+        "v_measures": {
+          "Level 0": [
+            0.44427172028947115,
+            0.4460777411350978,
+            0.47743267639861864,
+            0.38898042809002265,
+            0.4331213634047222,
+            0.4820867452052145,
+            0.42787400375652235,
+            0.40764561677973804,
+            0.47080504686129543,
+            0.4445664203451614
+          ]
+        }
+      }
+    ]
+  },
+  "task_name": "AlloProfClusteringS2S.v2"
+}
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/AlloprofReranking.json ADDED
@@ -0,0 +1,26 @@
+{
+  "dataset_revision": "65393d0d7a08a10b4e348135e824f385d420b0fd",
+  "evaluation_time": 36.745527267456055,
+  "kg_co2_emissions": 0.0033356256398904827,
+  "mteb_version": "1.12.75",
+  "scores": {
+    "test": [
+      {
+        "hf_subset": "default",
+        "languages": [
+          "fra-Latn"
+        ],
+        "main_score": 0.7507543109356692,
+        "map": 0.7507543109356692,
+        "mrr": 0.765236484634153,
+        "nAUC_map_diff1": 0.5534608137664327,
+        "nAUC_map_max": 0.2631179254628227,
+        "nAUC_map_std": 0.24861289860757638,
+        "nAUC_mrr_diff1": 0.562991502262229,
+        "nAUC_mrr_max": 0.2788123040864697,
+        "nAUC_mrr_std": 0.242692577252062
+      }
+    ]
+  },
+  "task_name": "AlloprofReranking"
+}
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/AmazonCounterfactualClassification.json ADDED
@@ -0,0 +1,685 @@
1
+ {
2
+ "dataset_revision": "e8379541af4e31359cca9fbcf4b00f2671dba205",
3
+ "evaluation_time": 19.589919328689575,
4
+ "kg_co2_emissions": 0.0007423879328369572,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.7444527736131934,
10
+ "ap": 0.23811050284750995,
11
+ "ap_weighted": 0.23811050284750995,
12
+ "f1": 0.6198233326065762,
13
+ "f1_weighted": 0.7917830702916404,
14
+ "hf_subset": "en-ext",
15
+ "languages": [
16
+ "eng-Latn"
17
+ ],
18
+ "main_score": 0.7444527736131934,
19
+ "scores_per_experiment": [
20
+ {
21
+ "accuracy": 0.820839580209895,
22
+ "ap": 0.3012795473649502,
23
+ "ap_weighted": 0.3012795473649502,
24
+ "f1": 0.6887665402505869,
25
+ "f1_weighted": 0.8492603609606322
26
+ },
27
+ {
28
+ "accuracy": 0.7301349325337332,
29
+ "ap": 0.23094061859441425,
30
+ "ap_weighted": 0.23094061859441425,
31
+ "f1": 0.6092407436338072,
32
+ "f1_weighted": 0.7812950070816802
33
+ },
34
+ {
35
+ "accuracy": 0.7128935532233883,
36
+ "ap": 0.21494410455877222,
37
+ "ap_weighted": 0.21494410455877222,
38
+ "f1": 0.5925437683572701,
39
+ "f1_weighted": 0.7678394550036437
40
+ },
41
+ {
42
+ "accuracy": 0.7106446776611695,
43
+ "ap": 0.20630557313591985,
44
+ "ap_weighted": 0.20630557313591985,
45
+ "f1": 0.5869558242209559,
46
+ "f1_weighted": 0.7658810697454291
47
+ },
48
+ {
49
+ "accuracy": 0.7166416791604198,
50
+ "ap": 0.21704575885647337,
51
+ "ap_weighted": 0.21704575885647337,
52
+ "f1": 0.5955163252733713,
53
+ "f1_weighted": 0.7707332755538141
54
+ },
55
+ {
56
+ "accuracy": 0.7203898050974513,
57
+ "ap": 0.23675855850106914,
58
+ "ap_weighted": 0.23675855850106914,
59
+ "f1": 0.6067997031925764,
60
+ "f1_weighted": 0.7740955854959906
61
+ },
62
+ {
63
+ "accuracy": 0.7488755622188905,
64
+ "ap": 0.22084820898903068,
65
+ "ap_weighted": 0.22084820898903068,
66
+ "f1": 0.6137627643070435,
67
+ "f1_weighted": 0.794598144021759
68
+ },
69
+ {
70
+ "accuracy": 0.7781109445277361,
71
+ "ap": 0.2593422375162138,
72
+ "ap_weighted": 0.2593422375162138,
73
+ "f1": 0.6475095785440612,
74
+ "f1_weighted": 0.8173556899711064
75
+ },
76
+ {
77
+ "accuracy": 0.7218890554722639,
78
+ "ap": 0.23372830716698717,
79
+ "ap_weighted": 0.23372830716698717,
80
+ "f1": 0.606219471733182,
81
+ "f1_weighted": 0.7751642579413015
82
+ },
83
+ {
84
+ "accuracy": 0.7841079460269865,
85
+ "ap": 0.259912113791269,
86
+ "ap_weighted": 0.259912113791269,
87
+ "f1": 0.6509186065529085,
88
+ "f1_weighted": 0.8216078571410472
89
+ }
90
+ ]
91
+ },
92
+ {
93
+ "accuracy": 0.7470149253731344,
94
+ "ap": 0.37887214206552733,
95
+ "ap_weighted": 0.37887214206552733,
96
+ "f1": 0.6880212070574656,
97
+ "f1_weighted": 0.7701697011479598,
98
+ "hf_subset": "en",
99
+ "languages": [
100
+ "eng-Latn"
101
+ ],
102
+ "main_score": 0.7470149253731344,
103
+ "scores_per_experiment": [
104
+ {
105
+ "accuracy": 0.7328358208955223,
106
+ "ap": 0.3584417064079819,
107
+ "ap_weighted": 0.3584417064079819,
108
+ "f1": 0.6727971167513798,
109
+ "f1_weighted": 0.7581483059179657
110
+ },
111
+ {
112
+ "accuracy": 0.7865671641791044,
113
+ "ap": 0.420219648757304,
114
+ "ap_weighted": 0.420219648757304,
115
+ "f1": 0.7260027168084651,
116
+ "f1_weighted": 0.8044480962599599
117
+ },
118
+ {
119
+ "accuracy": 0.6716417910447762,
120
+ "ap": 0.3222668958471571,
121
+ "ap_weighted": 0.3222668958471571,
122
+ "f1": 0.6242249959210312,
123
+ "f1_weighted": 0.7055109304188796
124
+ },
125
+ {
126
+ "accuracy": 0.7611940298507462,
127
+ "ap": 0.3796131015312451,
128
+ "ap_weighted": 0.3796131015312451,
129
+ "f1": 0.6961451247165533,
130
+ "f1_weighted": 0.7817578772802655
131
+ },
132
+ {
133
+ "accuracy": 0.7492537313432835,
134
+ "ap": 0.39074566573672165,
135
+ "ap_weighted": 0.39074566573672165,
136
+ "f1": 0.6952698605215282,
137
+ "f1_weighted": 0.7733741842636424
138
+ },
139
+ {
140
+ "accuracy": 0.7582089552238805,
141
+ "ap": 0.3812403792310172,
142
+ "ap_weighted": 0.3812403792310172,
143
+ "f1": 0.6955263069310264,
144
+ "f1_weighted": 0.7796530191135412
145
+ },
146
+ {
147
+ "accuracy": 0.8164179104477612,
148
+ "ap": 0.4508048469196925,
149
+ "ap_weighted": 0.4508048469196925,
150
+ "f1": 0.7522390482804651,
151
+ "f1_weighted": 0.8290278335128899
152
+ },
153
+ {
154
+ "accuracy": 0.7791044776119403,
155
+ "ap": 0.3911463988980246,
156
+ "ap_weighted": 0.3911463988980246,
157
+ "f1": 0.7094774344009657,
158
+ "f1_weighted": 0.7960866832731537
159
+ },
160
+ {
161
+ "accuracy": 0.7283582089552239,
162
+ "ap": 0.3743688236111616,
163
+ "ap_weighted": 0.3743688236111616,
164
+ "f1": 0.6775302530253026,
165
+ "f1_weighted": 0.755491929789994
166
+ },
167
+ {
168
+ "accuracy": 0.6865671641791045,
169
+ "ap": 0.3198739537149678,
170
+ "ap_weighted": 0.3198739537149678,
171
+ "f1": 0.6309992132179386,
172
+ "f1_weighted": 0.7181981516493064
173
+ }
174
+ ]
175
+ },
176
+ {
177
+ "accuracy": 0.6721627408993577,
178
+ "ap": 0.795095575068442,
179
+ "ap_weighted": 0.795095575068442,
180
+ "f1": 0.6547112844723946,
181
+ "f1_weighted": 0.6844192203709608,
182
+ "hf_subset": "de",
183
+ "languages": [
184
+ "deu-Latn"
185
+ ],
186
+ "main_score": 0.6721627408993577,
187
+ "scores_per_experiment": [
188
+ {
189
+ "accuracy": 0.6755888650963597,
190
+ "ap": 0.8088112599310483,
191
+ "ap_weighted": 0.8088112599310483,
192
+ "f1": 0.6637856987564911,
193
+ "f1_weighted": 0.6884711780730164
194
+ },
195
+ {
196
+ "accuracy": 0.6659528907922913,
197
+ "ap": 0.7894827003268342,
198
+ "ap_weighted": 0.7894827003268342,
199
+ "f1": 0.6481667503766951,
200
+ "f1_weighted": 0.6791654522438769
201
+ },
202
+ {
203
+ "accuracy": 0.7376873661670236,
204
+ "ap": 0.8150856891639244,
205
+ "ap_weighted": 0.8150856891639244,
206
+ "f1": 0.7103370055456608,
207
+ "f1_weighted": 0.7452158626446809
208
+ },
209
+ {
210
+ "accuracy": 0.6766595289079229,
211
+ "ap": 0.8006179765338808,
212
+ "ap_weighted": 0.8006179765338808,
213
+ "f1": 0.6611209626650135,
214
+ "f1_weighted": 0.6895565388895377
215
+ },
216
+ {
217
+ "accuracy": 0.6605995717344754,
218
+ "ap": 0.794761983198814,
219
+ "ap_weighted": 0.794761983198814,
220
+ "f1": 0.6470501283278517,
221
+ "f1_weighted": 0.6741490151410989
222
+ },
223
+ {
224
+ "accuracy": 0.6788008565310493,
225
+ "ap": 0.80360920084555,
226
+ "ap_weighted": 0.80360920084555,
227
+ "f1": 0.6640046046478164,
228
+ "f1_weighted": 0.6916343403073227
229
+ },
230
+ {
231
+ "accuracy": 0.6488222698072805,
232
+ "ap": 0.7666095355898597,
233
+ "ap_weighted": 0.7666095355898597,
234
+ "f1": 0.6226448561292866,
235
+ "f1_weighted": 0.6615917398941068
236
+ },
237
+ {
238
+ "accuracy": 0.6241970021413277,
239
+ "ap": 0.780018069472748,
240
+ "ap_weighted": 0.780018069472748,
241
+ "f1": 0.6143832595232634,
242
+ "f1_weighted": 0.6384894997797165
243
+ },
244
+ {
245
+ "accuracy": 0.7077087794432548,
246
+ "ap": 0.7970014483983499,
247
+ "ap_weighted": 0.7970014483983499,
248
+ "f1": 0.679520679916594,
249
+ "f1_weighted": 0.7167656056449616
250
+ },
251
+ {
252
+ "accuracy": 0.645610278372591,
253
+ "ap": 0.7949578872234118,
254
+ "ap_weighted": 0.7949578872234118,
255
+ "f1": 0.6360988988352725,
256
+ "f1_weighted": 0.6591529710912895
257
+ }
258
+ ]
259
+ },
260
+ {
261
+ "accuracy": 0.6981798715203427,
262
+ "ap": 0.17787887304839772,
263
+ "ap_weighted": 0.17787887304839772,
264
+ "f1": 0.5634555489747263,
265
+ "f1_weighted": 0.7551132420811811,
266
+ "hf_subset": "ja",
267
+ "languages": [
268
+ "jpn-Jpan"
269
+ ],
270
+ "main_score": 0.6981798715203427,
271
+ "scores_per_experiment": [
272
+ {
273
+ "accuracy": 0.6531049250535332,
274
+ "ap": 0.1774938606777472,
275
+ "ap_weighted": 0.1774938606777472,
276
+ "f1": 0.5420155095072916,
277
+ "f1_weighted": 0.7212075667579683
278
+ },
279
+ {
280
+ "accuracy": 0.7237687366167024,
281
+ "ap": 0.17395357378029777,
282
+ "ap_weighted": 0.17395357378029777,
283
+ "f1": 0.5738327674023769,
284
+ "f1_weighted": 0.7746495348338381
285
+ },
286
+ {
287
+ "accuracy": 0.6477516059957173,
288
+ "ap": 0.16258379892298724,
289
+ "ap_weighted": 0.16258379892298724,
290
+ "f1": 0.5303354746609565,
291
+ "f1_weighted": 0.7168938674883496
292
+ },
293
+ {
294
+ "accuracy": 0.7687366167023555,
295
+ "ap": 0.21514537914425577,
296
+ "ap_weighted": 0.21514537914425577,
297
+ "f1": 0.6202823263692829,
298
+ "f1_weighted": 0.808900619908906
299
+ },
300
+ {
301
+ "accuracy": 0.7612419700214133,
302
+ "ap": 0.22796012725081244,
303
+ "ap_weighted": 0.22796012725081244,
304
+ "f1": 0.6234490563718179,
305
+ "f1_weighted": 0.8044089535895167
306
+ },
307
+ {
308
+ "accuracy": 0.7312633832976445,
309
+ "ap": 0.16756908567698592,
310
+ "ap_weighted": 0.16756908567698592,
311
+ "f1": 0.5726647162296412,
312
+ "f1_weighted": 0.779484067661027
313
+ },
314
+ {
315
+ "accuracy": 0.7098501070663812,
316
+ "ap": 0.17992121727849117,
317
+ "ap_weighted": 0.17992121727849117,
318
+ "f1": 0.5712802445820171,
319
+ "f1_weighted": 0.7649127077899232
320
+ },
321
+ {
322
+ "accuracy": 0.6209850107066381,
323
+ "ap": 0.14479664784221485,
324
+ "ap_weighted": 0.14479664784221485,
325
+ "f1": 0.5050744552819242,
326
+ "f1_weighted": 0.6953524024614325
327
+ },
328
+ {
329
+ "accuracy": 0.6252676659528907,
330
+ "ap": 0.1478010382279027,
331
+ "ap_weighted": 0.1478010382279027,
332
+ "f1": 0.5093359750240154,
333
+ "f1_weighted": 0.6988102364099747
334
+ },
335
+ {
336
+ "accuracy": 0.7398286937901499,
337
+ "ap": 0.18156400168228218,
338
+ "ap_weighted": 0.18156400168228218,
339
+ "f1": 0.5862849643179394,
340
+ "f1_weighted": 0.7865124639108746
341
+ }
342
+ ]
343
+ }
344
+ ],
345
+ "validation": [
346
+ {
347
+ "accuracy": 0.7310810810810812,
348
+ "ap": 0.2129382739457578,
349
+ "ap_weighted": 0.2129382739457578,
350
+ "f1": 0.59986546354232,
351
+ "f1_weighted": 0.7823826716962354,
352
+ "hf_subset": "en-ext",
353
+ "languages": [
354
+ "eng-Latn"
355
+ ],
356
+ "main_score": 0.7310810810810812,
357
+ "scores_per_experiment": [
358
+ {
359
+ "accuracy": 0.8033033033033034,
360
+ "ap": 0.23870186074984864,
361
+ "ap_weighted": 0.23870186074984864,
362
+ "f1": 0.6497199638663054,
363
+ "f1_weighted": 0.8349951035045884
364
+ },
365
+ {
366
+ "accuracy": 0.6966966966966966,
367
+ "ap": 0.2060143713717948,
368
+ "ap_weighted": 0.2060143713717948,
369
+ "f1": 0.5791298036614818,
370
+ "f1_weighted": 0.7568161306351587
371
+ },
372
+ {
373
+ "accuracy": 0.7237237237237237,
374
+ "ap": 0.22900259172759332,
375
+ "ap_weighted": 0.22900259172759332,
376
+ "f1": 0.6041860465116279,
377
+ "f1_weighted": 0.7779402658472426
378
+ },
379
+ {
380
+ "accuracy": 0.7057057057057057,
381
+ "ap": 0.17787823296156072,
382
+ "ap_weighted": 0.17787823296156072,
383
+ "f1": 0.5680190614865313,
384
+ "f1_weighted": 0.7628310155413206
385
+ },
386
+ {
387
+ "accuracy": 0.6966966966966966,
388
+ "ap": 0.2023006863092151,
389
+ "ap_weighted": 0.2023006863092151,
390
+ "f1": 0.5772624434389141,
391
+ "f1_weighted": 0.7567512082217964
392
+ },
393
+ {
394
+ "accuracy": 0.6921921921921922,
395
+ "ap": 0.19643979618497273,
396
+ "ap_weighted": 0.19643979618497273,
397
+ "f1": 0.5719351491000072,
398
+ "f1_weighted": 0.7531723925137253
399
+ },
400
+ {
401
+ "accuracy": 0.7492492492492493,
402
+ "ap": 0.20397781271981696,
403
+ "ap_weighted": 0.20397781271981696,
404
+ "f1": 0.6043991221674071,
405
+ "f1_weighted": 0.7956156670992681
406
+ },
407
+ {
408
+ "accuracy": 0.7462462462462462,
409
+ "ap": 0.2143225314867106,
410
+ "ap_weighted": 0.2143225314867106,
411
+ "f1": 0.6085500346049644,
412
+ "f1_weighted": 0.794004172815501
413
+ },
414
+ {
415
+ "accuracy": 0.7117117117117117,
416
+ "ap": 0.21401006145007656,
417
+ "ap_weighted": 0.21401006145007656,
418
+ "f1": 0.5908253123080074,
419
+ "f1_weighted": 0.7684815677852745
420
+ },
421
+ {
422
+ "accuracy": 0.7852852852852853,
423
+ "ap": 0.2467347944959885,
424
+ "ap_weighted": 0.2467347944959885,
425
+ "f1": 0.6446276982779529,
426
+ "f1_weighted": 0.8232191929984798
427
+ }
428
+ ]
429
+ },
430
+ {
431
+ "accuracy": 0.7259701492537314,
432
+ "ap": 0.3185126792007951,
433
+ "ap_weighted": 0.3185126792007951,
434
+ "f1": 0.6515051262860765,
435
+ "f1_weighted": 0.7561520624546738,
436
+ "hf_subset": "en",
437
+ "languages": [
438
+ "eng-Latn"
439
+ ],
440
+ "main_score": 0.7259701492537314,
441
+ "scores_per_experiment": [
442
+ {
443
+ "accuracy": 0.7223880597014926,
444
+ "ap": 0.3033392873361388,
445
+ "ap_weighted": 0.3033392873361388,
446
+ "f1": 0.6429676487778045,
447
+ "f1_weighted": 0.7530503702479797
448
+ },
449
+ {
450
+ "accuracy": 0.746268656716418,
451
+ "ap": 0.33721445821291424,
452
+ "ap_weighted": 0.33721445821291424,
453
+ "f1": 0.6712842712842713,
454
+ "f1_weighted": 0.7739191488445221
455
+ },
456
+ {
457
+ "accuracy": 0.6447761194029851,
458
+ "ap": 0.2742702982719936,
459
+ "ap_weighted": 0.2742702982719936,
460
+ "f1": 0.5883798490433562,
461
+ "f1_weighted": 0.6879829394365717
462
+ },
463
+ {
464
+ "accuracy": 0.7791044776119403,
465
+ "ap": 0.37362421608432933,
466
+ "ap_weighted": 0.37362421608432933,
467
+ "f1": 0.7036957353222413,
468
+ "f1_weighted": 0.8014147563958749
469
+ },
470
+ {
471
+ "accuracy": 0.6925373134328359,
472
+ "ap": 0.3021184037026045,
473
+ "ap_weighted": 0.3021184037026045,
474
+ "f1": 0.627476383265857,
475
+ "f1_weighted": 0.7292502668842024
476
+ },
477
+ {
478
+ "accuracy": 0.7671641791044777,
479
+ "ap": 0.340201512332238,
480
+ "ap_weighted": 0.340201512332238,
481
+ "f1": 0.6824876057159521,
482
+ "f1_weighted": 0.7896793373465133
483
+ },
484
+ {
485
+ "accuracy": 0.7522388059701492,
486
+ "ap": 0.321613171653816,
487
+ "ap_weighted": 0.321613171653816,
488
+ "f1": 0.6663066306630663,
489
+ "f1_weighted": 0.7770074917939556
490
+ },
491
+ {
492
+ "accuracy": 0.755223880597015,
493
+ "ap": 0.30866700977869277,
494
+ "ap_weighted": 0.30866700977869277,
495
+ "f1": 0.660428204113924,
496
+ "f1_weighted": 0.7777177699319857
497
+ },
498
+ {
499
+ "accuracy": 0.7104477611940299,
500
+ "ap": 0.3191982787643323,
501
+ "ap_weighted": 0.3191982787643323,
502
+ "f1": 0.6448206888259791,
503
+ "f1_weighted": 0.7446285280523895
504
+ },
505
+ {
506
+ "accuracy": 0.6895522388059702,
507
+ "ap": 0.3048801558708918,
508
+ "ap_weighted": 0.3048801558708918,
509
+ "f1": 0.6272042458483137,
510
+ "f1_weighted": 0.7268700156127427
511
+ }
512
+ ]
513
+ },
514
+ {
515
+ "accuracy": 0.6800429184549357,
516
+ "ap": 0.8006844296633766,
517
+ "ap_weighted": 0.8006844296633766,
518
+ "f1": 0.6618664655868342,
519
+ "f1_weighted": 0.6921755542096039,
520
+ "hf_subset": "de",
521
+ "languages": [
522
+ "deu-Latn"
523
+ ],
524
+ "main_score": 0.6800429184549357,
525
+ "scores_per_experiment": [
526
+ {
527
+ "accuracy": 0.6974248927038627,
528
+ "ap": 0.8157005774778237,
529
+ "ap_weighted": 0.8157005774778237,
530
+ "f1": 0.6818835422446223,
531
+ "f1_weighted": 0.7096467314145275
532
+ },
533
+ {
534
+ "accuracy": 0.7081545064377682,
535
+ "ap": 0.8231442720369759,
536
+ "ap_weighted": 0.8231442720369759,
537
+ "f1": 0.6928564505185616,
538
+ "f1_weighted": 0.7199222417602349
539
+ },
540
+ {
541
+ "accuracy": 0.7424892703862661,
542
+ "ap": 0.8268789579519193,
543
+ "ap_weighted": 0.8268789579519193,
544
+ "f1": 0.7192771084337349,
545
+ "f1_weighted": 0.7511505248461658
546
+ },
547
+ {
548
+ "accuracy": 0.648068669527897,
549
+ "ap": 0.7858415178101053,
550
+ "ap_weighted": 0.7858415178101053,
551
+ "f1": 0.6331413210445469,
552
+ "f1_weighted": 0.6623608116928067
553
+ },
554
+ {
555
+ "accuracy": 0.6716738197424893,
556
+ "ap": 0.7994160889182349,
557
+ "ap_weighted": 0.7994160889182349,
558
+ "f1": 0.6561549034264909,
559
+ "f1_weighted": 0.6849981418319829
560
+ },
561
+ {
562
+ "accuracy": 0.628755364806867,
563
+ "ap": 0.7755937818479337,
564
+ "ap_weighted": 0.7755937818479337,
565
+ "f1": 0.614701173321863,
566
+ "f1_weighted": 0.6437570298751296
567
+ },
568
+ {
569
+ "accuracy": 0.6673819742489271,
570
+ "ap": 0.7674229303215767,
571
+ "ap_weighted": 0.7674229303215767,
572
+ "f1": 0.631725734330626,
573
+ "f1_weighted": 0.6769722732614357
574
+ },
575
+ {
576
+ "accuracy": 0.6630901287553648,
577
+ "ap": 0.8045163420270716,
578
+ "ap_weighted": 0.8045163420270716,
579
+ "f1": 0.6520519149462349,
580
+ "f1_weighted": 0.6765221720652697
581
+ },
582
+ {
583
+ "accuracy": 0.7188841201716738,
584
+ "ap": 0.8087963693953617,
585
+ "ap_weighted": 0.8087963693953617,
586
+ "f1": 0.693129778666975,
587
+ "f1_weighted": 0.7282319922733793
588
+ },
589
+ {
590
+ "accuracy": 0.6545064377682404,
591
+ "ap": 0.7995334588467635,
592
+ "ap_weighted": 0.7995334588467635,
593
+ "f1": 0.6437427289346851,
594
+ "f1_weighted": 0.6681936230751069
595
+ }
596
+ ]
597
+ },
598
+ {
599
+ "accuracy": 0.7027896995708154,
600
+ "ap": 0.16581375762714107,
601
+ "ap_weighted": 0.16581375762714107,
602
+ "f1": 0.5573441057326718,
603
+ "f1_weighted": 0.7603183529182594,
604
+ "hf_subset": "ja",
605
+ "languages": [
606
+ "jpn-Jpan"
607
+ ],
608
+ "main_score": 0.7027896995708154,
609
+ "scores_per_experiment": [
610
+ {
611
+ "accuracy": 0.6738197424892703,
612
+ "ap": 0.16553602503070255,
613
+ "ap_weighted": 0.16553602503070255,
614
+ "f1": 0.5449335697581785,
615
+ "f1_weighted": 0.7393022334736153
616
+ },
617
+ {
618
+ "accuracy": 0.7446351931330472,
619
+ "ap": 0.1814416616274779,
620
+ "ap_weighted": 0.1814416616274779,
621
+ "f1": 0.5885805010869001,
622
+ "f1_weighted": 0.7919409708613218
623
+ },
624
+ {
625
+ "accuracy": 0.6587982832618026,
626
+ "ap": 0.15581054311718076,
627
+ "ap_weighted": 0.15581054311718076,
628
+ "f1": 0.5313679976218637,
629
+ "f1_weighted": 0.7274952685244858
630
+ },
631
+ {
632
+ "accuracy": 0.7575107296137339,
633
+ "ap": 0.20589634189972036,
634
+ "ap_weighted": 0.20589634189972036,
635
+ "f1": 0.6093243413682329,
636
+ "f1_weighted": 0.8024313420783981
637
+ },
638
+ {
639
+ "accuracy": 0.740343347639485,
640
+ "ap": 0.20148837913346215,
641
+ "ap_weighted": 0.20148837913346215,
642
+ "f1": 0.5984675311727802,
643
+ "f1_weighted": 0.7900254927559845
644
+ },
645
+ {
646
+ "accuracy": 0.7360515021459227,
647
+ "ap": 0.17190741777418395,
648
+ "ap_weighted": 0.17190741777418395,
649
+ "f1": 0.5783301822248053,
650
+ "f1_weighted": 0.7853048266125173
651
+ },
652
+ {
653
+ "accuracy": 0.6995708154506438,
654
+ "ap": 0.13539156191111962,
655
+ "ap_weighted": 0.13539156191111962,
656
+ "f1": 0.5335735529627087,
657
+ "f1_weighted": 0.7568936111299306
658
+ },
659
+ {
660
+ "accuracy": 0.6223175965665236,
661
+ "ap": 0.11755714947657096,
662
+ "ap_weighted": 0.11755714947657096,
663
+ "f1": 0.48284907183212267,
664
+ "f1_weighted": 0.6983913373307422
665
+ },
666
+ {
667
+ "accuracy": 0.648068669527897,
668
+ "ap": 0.15655956659662332,
669
+ "ap_weighted": 0.15655956659662332,
670
+ "f1": 0.5266691440604484,
671
+ "f1_weighted": 0.7190565276402185
672
+ },
673
+ {
674
+ "accuracy": 0.7467811158798283,
675
+ "ap": 0.16654892970436916,
676
+ "ap_weighted": 0.16654892970436916,
677
+ "f1": 0.5793451652386781,
678
+ "f1_weighted": 0.7923419187753794
679
+ }
680
+ ]
681
+ }
682
+ ]
683
+ },
684
+ "task_name": "AmazonCounterfactualClassification"
685
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/ArXivHierarchicalClusteringP2P.json ADDED
@@ -0,0 +1,46 @@
1
+ {
2
+ "dataset_revision": "0bbdb47bcbe3a90093699aefeed338a0f28a7ee8",
3
+ "evaluation_time": 51.224119424819946,
4
+ "kg_co2_emissions": 0.0014534297116996133,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "eng-Latn"
12
+ ],
13
+ "main_score": 0.5799437291346836,
14
+ "v_measure": 0.5799437291346836,
15
+ "v_measure_std": 0.019586214172126987,
16
+ "v_measures": {
17
+ "Level 0": [
18
+ 0.5563952985750681,
19
+ 0.5737938350395156,
20
+ 0.5818783486470299,
21
+ 0.6021245992368816,
22
+ 0.5878960528097944,
23
+ 0.5459810296389425,
24
+ 0.5469767390321151,
25
+ 0.5400553231575698,
26
+ 0.5835400971394934,
27
+ 0.6011917503064965
28
+ ],
29
+ "Level 1": [
30
+ 0.5722624555605492,
31
+ 0.5835437421749773,
32
+ 0.6024746455136257,
33
+ 0.5791135066556228,
34
+ 0.5743916128426589,
35
+ 0.6133717997883887,
36
+ 0.5760951702244291,
37
+ 0.5919391529590488,
38
+ 0.592210813722662,
39
+ 0.5936386096688032
40
+ ]
41
+ }
42
+ }
43
+ ]
44
+ },
45
+ "task_name": "ArXivHierarchicalClusteringP2P"
46
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/ArXivHierarchicalClusteringS2S.json ADDED
@@ -0,0 +1,46 @@
1
+ {
2
+ "dataset_revision": "b73bd54100e5abfa6e3a23dcafb46fe4d2438dc3",
3
+ "evaluation_time": 50.95248889923096,
4
+ "kg_co2_emissions": 0.0014068091169269724,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "eng-Latn"
12
+ ],
13
+ "main_score": 0.5794172192774746,
14
+ "v_measure": 0.5794172192774746,
15
+ "v_measure_std": 0.01933516999440283,
16
+ "v_measures": {
17
+ "Level 0": [
18
+ 0.5809116082725915,
19
+ 0.537591933360143,
20
+ 0.6165251723573371,
21
+ 0.5847551538654004,
22
+ 0.5935653815789085,
23
+ 0.6033641463849896,
24
+ 0.6049640449984198,
25
+ 0.6012310127246141,
26
+ 0.5874152564409058,
27
+ 0.5647950366659332
28
+ ],
29
+ "Level 1": [
30
+ 0.5518188214372792,
31
+ 0.5734923464157481,
32
+ 0.5863807272786228,
33
+ 0.5703137122761907,
34
+ 0.5680665318247939,
35
+ 0.5760433091251643,
36
+ 0.5758883481900802,
37
+ 0.5531404061279529,
38
+ 0.5654679274473484,
39
+ 0.5926135087770682
40
+ ]
41
+ }
42
+ }
43
+ ]
44
+ },
45
+ "task_name": "ArXivHierarchicalClusteringS2S"
46
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/ArguAna.json ADDED
@@ -0,0 +1,158 @@
1
+ {
2
+ "dataset_revision": "c22ab2a51041ffd869aaddef7af8d8215647e41a",
3
+ "evaluation_time": 20.405173540115356,
4
+ "kg_co2_emissions": 0.0016107156433406498,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "eng-Latn"
12
+ ],
13
+ "main_score": 0.58284,
14
+ "map_at_1": 0.32859,
15
+ "map_at_10": 0.49445,
16
+ "map_at_100": 0.50215,
17
+ "map_at_1000": 0.50218,
18
+ "map_at_20": 0.50082,
19
+ "map_at_3": 0.4463,
20
+ "map_at_5": 0.47664,
21
+ "mrr_at_1": 0.3335704125177809,
22
+ "mrr_at_10": 0.49637805098331467,
23
+ "mrr_at_100": 0.504013083711599,
24
+ "mrr_at_1000": 0.5040340217435418,
25
+ "mrr_at_20": 0.5026801733193299,
26
+ "mrr_at_3": 0.4481981981981984,
27
+ "mrr_at_5": 0.47863916548127156,
28
+ "nauc_map_at_1000_diff1": 0.13658806599507958,
29
+ "nauc_map_at_1000_max": -0.07059134703150137,
30
+ "nauc_map_at_1000_std": -0.16173024977407902,
31
+ "nauc_map_at_100_diff1": 0.13660568418415456,
32
+ "nauc_map_at_100_max": -0.0705462225745659,
33
+ "nauc_map_at_100_std": -0.16167304663901194,
34
+ "nauc_map_at_10_diff1": 0.13359964947628822,
35
+ "nauc_map_at_10_max": -0.07192420081311536,
36
+ "nauc_map_at_10_std": -0.1635040285765837,
37
+ "nauc_map_at_1_diff1": 0.16964993878042603,
38
+ "nauc_map_at_1_max": -0.09457228545018442,
39
+ "nauc_map_at_1_std": -0.1710577503642865,
40
+ "nauc_map_at_20_diff1": 0.1367837171115754,
41
+ "nauc_map_at_20_max": -0.06968337357595565,
42
+ "nauc_map_at_20_std": -0.16202692857210246,
43
+ "nauc_map_at_3_diff1": 0.13455661155321752,
44
+ "nauc_map_at_3_max": -0.06835979578611119,
45
+ "nauc_map_at_3_std": -0.16416136969776896,
46
+ "nauc_map_at_5_diff1": 0.13435164210372744,
47
+ "nauc_map_at_5_max": -0.07085685085234539,
48
+ "nauc_map_at_5_std": -0.16342146452734654,
49
+ "nauc_mrr_at_1000_diff1": 0.12324596977761433,
50
+ "nauc_mrr_at_1000_max": -0.07336072998726775,
51
+ "nauc_mrr_at_1000_std": -0.16272470779380507,
52
+ "nauc_mrr_at_100_diff1": 0.12326448811570562,
53
+ "nauc_mrr_at_100_max": -0.0733154907363886,
54
+ "nauc_mrr_at_100_std": -0.16266753467161799,
55
+ "nauc_mrr_at_10_diff1": 0.12050832359711862,
56
+ "nauc_mrr_at_10_max": -0.07451952594244411,
57
+ "nauc_mrr_at_10_std": -0.16444997838642567,
58
+ "nauc_mrr_at_1_diff1": 0.15506653756109892,
59
+ "nauc_mrr_at_1_max": -0.08967493506534223,
60
+ "nauc_mrr_at_1_std": -0.17264771571098758,
61
+ "nauc_mrr_at_20_diff1": 0.12352084358903677,
62
+ "nauc_mrr_at_20_max": -0.07242290469111866,
63
+ "nauc_mrr_at_20_std": -0.16301269967494128,
64
+ "nauc_mrr_at_3_diff1": 0.12260890144708615,
65
+ "nauc_mrr_at_3_max": -0.07209484646208202,
66
+ "nauc_mrr_at_3_std": -0.16547400788739827,
67
+ "nauc_mrr_at_5_diff1": 0.12168677307417618,
68
+ "nauc_mrr_at_5_max": -0.0739712953990007,
69
+ "nauc_mrr_at_5_std": -0.16426796810567798,
70
+ "nauc_ndcg_at_1000_diff1": 0.13197806637980636,
71
+ "nauc_ndcg_at_1000_max": -0.06513090765007161,
72
+ "nauc_ndcg_at_1000_std": -0.15719884762474612,
73
+ "nauc_ndcg_at_100_diff1": 0.132410519995102,
74
+ "nauc_ndcg_at_100_max": -0.06365960538240219,
75
+ "nauc_ndcg_at_100_std": -0.15542622986267826,
76
+ "nauc_ndcg_at_10_diff1": 0.12138974058465575,
77
+ "nauc_ndcg_at_10_max": -0.06628236833463177,
78
+ "nauc_ndcg_at_10_std": -0.16304243268068908,
79
+ "nauc_ndcg_at_1_diff1": 0.16964993878042603,
80
+ "nauc_ndcg_at_1_max": -0.09457228545018442,
81
+ "nauc_ndcg_at_1_std": -0.1710577503642865,
82
+ "nauc_ndcg_at_20_diff1": 0.13336014273499616,
83
+ "nauc_ndcg_at_20_max": -0.05666547073734123,
84
+ "nauc_ndcg_at_20_std": -0.15792684918635885,
85
+ "nauc_ndcg_at_3_diff1": 0.12634493553336715,
86
+ "nauc_ndcg_at_3_max": -0.05990506599933991,
87
+ "nauc_ndcg_at_3_std": -0.1638283363586836,
88
+ "nauc_ndcg_at_5_diff1": 0.12496658332036976,
89
+ "nauc_ndcg_at_5_max": -0.06357223620716256,
90
+ "nauc_ndcg_at_5_std": -0.16177960284316031,
91
+ "nauc_precision_at_1000_diff1": -0.49660634392643604,
92
+ "nauc_precision_at_1000_max": 0.2846264612085611,
93
+ "nauc_precision_at_1000_std": 0.8291104777423145,
94
+ "nauc_precision_at_100_diff1": -0.07989323555534796,
95
+ "nauc_precision_at_100_max": 0.4859660340375284,
96
+ "nauc_precision_at_100_std": 0.7843291141151117,
97
+ "nauc_precision_at_10_diff1": 0.03804428089643328,
98
+ "nauc_precision_at_10_max": -0.03358725569485907,
99
+ "nauc_precision_at_10_std": -0.163246469509831,
100
+ "nauc_precision_at_1_diff1": 0.16964993878042603,
101
+ "nauc_precision_at_1_max": -0.09457228545018442,
102
+ "nauc_precision_at_1_std": -0.1710577503642865,
103
+ "nauc_precision_at_20_diff1": 0.12130415628403307,
104
+ "nauc_precision_at_20_max": 0.19914240866559715,
105
+ "nauc_precision_at_20_std": -0.07948410554590564,
106
+ "nauc_precision_at_3_diff1": 0.10199579857623743,
107
+ "nauc_precision_at_3_max": -0.03373490319262838,
108
+ "nauc_precision_at_3_std": -0.1634479385221895,
109
+ "nauc_precision_at_5_diff1": 0.0893339314004429,
110
+ "nauc_precision_at_5_max": -0.035675555356901076,
111
+ "nauc_precision_at_5_std": -0.15527056150329452,
112
+ "nauc_recall_at_1000_diff1": -0.49660634392647107,
113
+ "nauc_recall_at_1000_max": 0.28462646120853596,
114
+ "nauc_recall_at_1000_std": 0.8291104777423081,
115
+ "nauc_recall_at_100_diff1": -0.07989323555533344,
116
+ "nauc_recall_at_100_max": 0.4859660340375217,
117
+ "nauc_recall_at_100_std": 0.7843291141151,
118
+ "nauc_recall_at_10_diff1": 0.038044280896433476,
119
+ "nauc_recall_at_10_max": -0.03358725569485779,
120
+ "nauc_recall_at_10_std": -0.16324646950982932,
121
+ "nauc_recall_at_1_diff1": 0.16964993878042603,
122
+ "nauc_recall_at_1_max": -0.09457228545018442,
123
+ "nauc_recall_at_1_std": -0.1710577503642865,
124
+ "nauc_recall_at_20_diff1": 0.12130415628403349,
125
+ "nauc_recall_at_20_max": 0.19914240866559677,
126
+ "nauc_recall_at_20_std": -0.07948410554590614,
127
+ "nauc_recall_at_3_diff1": 0.10199579857623747,
128
+ "nauc_recall_at_3_max": -0.03373490319262785,
129
+ "nauc_recall_at_3_std": -0.1634479385221889,
130
+ "nauc_recall_at_5_diff1": 0.0893339314004441,
131
+ "nauc_recall_at_5_max": -0.03567555535689934,
132
+ "nauc_recall_at_5_std": -0.1552705615032935,
133
+ "ndcg_at_1": 0.32859,
134
+ "ndcg_at_10": 0.58284,
135
+ "ndcg_at_100": 0.61369,
136
+ "ndcg_at_1000": 0.61417,
137
+ "ndcg_at_20": 0.60534,
138
+ "ndcg_at_3": 0.48559,
139
+ "ndcg_at_5": 0.54017,
140
+ "precision_at_1": 0.32859,
141
+ "precision_at_10": 0.08627,
142
+ "precision_at_100": 0.00993,
143
+ "precision_at_1000": 0.001,
144
+ "precision_at_20": 0.04751,
145
+ "precision_at_3": 0.19986,
146
+ "precision_at_5": 0.14637,
147
+ "recall_at_1": 0.32859,
148
+ "recall_at_10": 0.86273,
149
+ "recall_at_100": 0.99289,
150
+ "recall_at_1000": 0.99644,
151
+ "recall_at_20": 0.95021,
152
+ "recall_at_3": 0.59957,
153
+ "recall_at_5": 0.73186
154
+ }
155
+ ]
156
+ },
157
+ "task_name": "ArguAna"
158
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/ArmenianParaphrasePC.json ADDED
@@ -0,0 +1,58 @@
1
+ {
2
+ "dataset_revision": "f43b4f32987048043a8b31e5e26be4d360c2438f",
3
+ "evaluation_time": 1.960935354232788,
4
+ "kg_co2_emissions": 0.00010954095746479199,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "cosine_accuracy": 0.9,
10
+ "cosine_accuracy_threshold": 0.7670648097991943,
11
+ "cosine_ap": 0.9450463621366145,
12
+ "cosine_f1": 0.9326614750343564,
13
+ "cosine_f1_threshold": 0.7670648097991943,
14
+ "cosine_precision": 0.8760757314974182,
15
+ "cosine_recall": 0.9970617042115573,
16
+ "dot_accuracy": 0.9,
17
+ "dot_accuracy_threshold": 0.7670649290084839,
18
+ "dot_ap": 0.9450456087886201,
19
+ "dot_f1": 0.9326614750343564,
20
+ "dot_f1_threshold": 0.7670649290084839,
21
+ "dot_precision": 0.8760757314974182,
22
+ "dot_recall": 0.9970617042115573,
23
+ "euclidean_accuracy": 0.9,
24
+ "euclidean_accuracy_threshold": 0.6825469136238098,
25
+ "euclidean_ap": 0.9450463621366145,
26
+ "euclidean_f1": 0.9326614750343564,
27
+ "euclidean_f1_threshold": 0.6825469136238098,
28
+ "euclidean_precision": 0.8760757314974182,
29
+ "euclidean_recall": 0.9970617042115573,
30
+ "hf_subset": "default",
31
+ "languages": [
32
+ "hye-Armn"
33
+ ],
34
+ "main_score": 0.945295312187127,
35
+ "manhattan_accuracy": 0.8993197278911564,
36
+ "manhattan_accuracy_threshold": 14.593847274780273,
37
+ "manhattan_ap": 0.945295312187127,
38
+ "manhattan_f1": 0.9321723189734189,
39
+ "manhattan_f1_threshold": 14.593847274780273,
40
+ "manhattan_precision": 0.875968992248062,
41
+ "manhattan_recall": 0.9960822722820764,
42
+ "max_accuracy": 0.9,
43
+ "max_ap": 0.945295312187127,
44
+ "max_f1": 0.9326614750343564,
45
+ "max_precision": 0.8760757314974182,
46
+ "max_recall": 0.9970617042115573,
47
+ "similarity_accuracy": 0.9,
48
+ "similarity_accuracy_threshold": 0.7670648097991943,
49
+ "similarity_ap": 0.9450463621366145,
50
+ "similarity_f1": 0.9326614750343564,
51
+ "similarity_f1_threshold": 0.7670648097991943,
52
+ "similarity_precision": 0.8760757314974182,
53
+ "similarity_recall": 0.9970617042115573
54
+ }
55
+ ]
56
+ },
57
+ "task_name": "ArmenianParaphrasePC"
58
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/BUCC.v2.json ADDED
@@ -0,0 +1,59 @@
1
+ {
2
+ "dataset_revision": "1739dc11ffe9b7bfccd7f3d585aeb4c544fc6677",
3
+ "evaluation_time": 35.74144887924194,
4
+ "kg_co2_emissions": 0.0019133463265158886,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.9887739379264803,
10
+ "f1": 0.9852336928608115,
11
+ "hf_subset": "fr-en",
12
+ "languages": [
13
+ "fra-Latn",
14
+ "eng-Latn"
15
+ ],
16
+ "main_score": 0.9852336928608115,
17
+ "precision": 0.9834910851860005,
18
+ "recall": 0.9887739379264803
19
+ },
20
+ {
21
+ "accuracy": 0.9783858676827156,
22
+ "f1": 0.9715390832467382,
23
+ "hf_subset": "ru-en",
24
+ "languages": [
25
+ "rus-Cyrl",
26
+ "eng-Latn"
27
+ ],
28
+ "main_score": 0.9715390832467382,
29
+ "precision": 0.9681445560558827,
30
+ "recall": 0.9783858676827156
31
+ },
32
+ {
33
+ "accuracy": 0.9921011058451816,
34
+ "f1": 0.989468141126909,
35
+ "hf_subset": "zh-en",
36
+ "languages": [
37
+ "cmn-Hans",
38
+ "eng-Latn"
39
+ ],
40
+ "main_score": 0.989468141126909,
41
+ "precision": 0.9881516587677726,
42
+ "recall": 0.9921011058451816
43
+ },
44
+ {
45
+ "accuracy": 0.9930062630480166,
46
+ "f1": 0.9907794015309672,
47
+ "hf_subset": "de-en",
48
+ "languages": [
49
+ "deu-Latn",
50
+ "eng-Latn"
51
+ ],
52
+ "main_score": 0.9907794015309672,
53
+ "precision": 0.9896833681280447,
54
+ "recall": 0.9930062630480166
55
+ }
56
+ ]
57
+ },
58
+ "task_name": "BUCC.v2"
59
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/BelebeleRetrieval.json ADDED
The diff for this file is too large to render. See raw diff
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/BibleNLPBitextMining.json ADDED
The diff for this file is too large to render. See raw diff
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/BigPatentClustering.v2.json ADDED
@@ -0,0 +1,34 @@
1
+ {
2
+ "dataset_revision": "58a863a958586a5d6ba51088b94ac74a46aa864f",
3
+ "evaluation_time": 38.18953275680542,
4
+ "kg_co2_emissions": 0.0015398585609415548,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "eng-Latn"
12
+ ],
13
+ "main_score": 0.32659002018707006,
14
+ "v_measure": 0.32659002018707006,
15
+ "v_measure_std": 0.04497780303397157,
16
+ "v_measures": {
17
+ "Level 0": [
18
+ 0.34559080701462314,
19
+ 0.2786278552775762,
20
+ 0.3272863449258742,
21
+ 0.4252538476469551,
22
+ 0.25404001821387406,
23
+ 0.2926740589990323,
24
+ 0.34033377800756404,
25
+ 0.32875327105933083,
26
+ 0.3140536274501474,
27
+ 0.35928659327572365
28
+ ]
29
+ }
30
+ }
31
+ ]
32
+ },
33
+ "task_name": "BigPatentClustering.v2"
34
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/BiorxivClusteringP2P.v2.json ADDED
@@ -0,0 +1,34 @@
1
+ {
2
+ "dataset_revision": "f5dbc242e11dd8e24def4c4268607a49e02946dc",
3
+ "evaluation_time": 49.459433794021606,
4
+ "kg_co2_emissions": 0.0018011339343426348,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "eng-Latn"
12
+ ],
13
+ "main_score": 0.39660852705122057,
14
+ "v_measure": 0.39660852705122057,
15
+ "v_measure_std": 0.010342150148487406,
16
+ "v_measures": {
17
+ "Level 0": [
18
+ 0.38569552578222527,
19
+ 0.38375510019155606,
20
+ 0.41022877426195437,
21
+ 0.4172682039372059,
22
+ 0.391417100325845,
23
+ 0.40251627301859333,
24
+ 0.3941083669888483,
25
+ 0.4009748415543495,
26
+ 0.39020725283338653,
27
+ 0.3899138316182415
28
+ ]
29
+ }
30
+ }
31
+ ]
32
+ },
33
+ "task_name": "BiorxivClusteringP2P.v2"
34
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/BornholmBitextMining.json ADDED
@@ -0,0 +1,22 @@
1
+ {
2
+ "dataset_revision": "3bc5cfb4ec514264fe2db5615fac9016f7251552",
3
+ "evaluation_time": 0.8987767696380615,
4
+ "kg_co2_emissions": 3.6087084505467906e-05,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.522,
10
+ "f1": 0.4432857142857143,
11
+ "hf_subset": "default",
12
+ "languages": [
13
+ "dan-Latn"
14
+ ],
15
+ "main_score": 0.4432857142857143,
16
+ "precision": 0.41128717948717947,
17
+ "recall": 0.522
18
+ }
19
+ ]
20
+ },
21
+ "task_name": "BornholmBitextMining"
22
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/BrazilianToxicTweetsClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "fb4f11a5bc68b99891852d20f1ec074be6289768",
3
+ "evaluation_time": 4.471953868865967,
4
+ "kg_co2_emissions": 0.00016421731814662404,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.186962890625,
10
+ "f1": 0.16201349864446146,
11
+ "hf_subset": "default",
12
+ "languages": [
13
+ "por-Latn"
14
+ ],
15
+ "lrap": 0.8017557779947927,
16
+ "main_score": 0.186962890625,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.14697265625,
20
+ "f1": 0.16083358302455805,
21
+ "lrap": 0.8210177951388892
22
+ },
23
+ {
24
+ "accuracy": 0.1435546875,
25
+ "f1": 0.17536624167400347,
26
+ "lrap": 0.8049858940972238
27
+ },
28
+ {
29
+ "accuracy": 0.17529296875,
30
+ "f1": 0.16184159281409177,
31
+ "lrap": 0.8106146918402786
32
+ },
33
+ {
34
+ "accuracy": 0.21142578125,
35
+ "f1": 0.15388391893712672,
36
+ "lrap": 0.8200005425347228
37
+ },
38
+ {
39
+ "accuracy": 0.1953125,
40
+ "f1": 0.163601717551508,
41
+ "lrap": 0.8025309244791679
42
+ },
43
+ {
44
+ "accuracy": 0.158203125,
45
+ "f1": 0.15752479740350503,
46
+ "lrap": 0.8051893446180571
47
+ },
48
+ {
49
+ "accuracy": 0.24755859375,
50
+ "f1": 0.14696684345300104,
51
+ "lrap": 0.7555338541666676
52
+ },
53
+ {
54
+ "accuracy": 0.11474609375,
55
+ "f1": 0.17149828203791276,
56
+ "lrap": 0.79766845703125
57
+ },
58
+ {
59
+ "accuracy": 0.23388671875,
60
+ "f1": 0.1613543005843391,
61
+ "lrap": 0.7918701171875013
62
+ },
63
+ {
64
+ "accuracy": 0.24267578125,
65
+ "f1": 0.1672637089645688,
66
+ "lrap": 0.8081461588541681
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "BrazilianToxicTweetsClassification"
73
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/BulgarianStoreReviewSentimentClassfication.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "701984d6c6efea0e14a1c7850ef70e464c5577c0",
3
+ "evaluation_time": 6.399269342422485,
4
+ "kg_co2_emissions": 0.0002121174269679795,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.6434065934065935,
10
+ "f1": 0.4676553179673367,
11
+ "f1_weighted": 0.682127397309468,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "bul-Cyrl"
15
+ ],
16
+ "main_score": 0.6434065934065935,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.5934065934065934,
20
+ "f1": 0.46116257121884724,
21
+ "f1_weighted": 0.6516138201342158
22
+ },
23
+ {
24
+ "accuracy": 0.5879120879120879,
25
+ "f1": 0.4217228612022255,
26
+ "f1_weighted": 0.6431588193576394
27
+ },
28
+ {
29
+ "accuracy": 0.6263736263736264,
30
+ "f1": 0.4530327293431809,
31
+ "f1_weighted": 0.6702567920674037
32
+ },
33
+ {
34
+ "accuracy": 0.6923076923076923,
35
+ "f1": 0.4476991490436105,
36
+ "f1_weighted": 0.7160691738767261
37
+ },
38
+ {
39
+ "accuracy": 0.6703296703296703,
40
+ "f1": 0.4810523033057115,
41
+ "f1_weighted": 0.6856946047804108
42
+ },
43
+ {
44
+ "accuracy": 0.5934065934065934,
45
+ "f1": 0.43716011938569044,
46
+ "f1_weighted": 0.6394128381310774
47
+ },
48
+ {
49
+ "accuracy": 0.6373626373626373,
50
+ "f1": 0.4888949625767387,
51
+ "f1_weighted": 0.6957660286059916
52
+ },
53
+ {
54
+ "accuracy": 0.6813186813186813,
55
+ "f1": 0.5049461036558706,
56
+ "f1_weighted": 0.7054585259357734
57
+ },
58
+ {
59
+ "accuracy": 0.6648351648351648,
60
+ "f1": 0.4836015291109448,
61
+ "f1_weighted": 0.7024791628980294
62
+ },
63
+ {
64
+ "accuracy": 0.6868131868131868,
65
+ "f1": 0.4972808508305466,
66
+ "f1_weighted": 0.7113642073074121
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "BulgarianStoreReviewSentimentClassfication"
73
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/CEDRClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "c0ba03d058e3e1b2f3fd20518875a4563dd12db4",
3
+ "evaluation_time": 4.379773378372192,
4
+ "kg_co2_emissions": 0.00016132615808649964,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.4163124335812965,
10
+ "f1": 0.3582504040158968,
11
+ "hf_subset": "default",
12
+ "languages": [
13
+ "rus-Cyrl"
14
+ ],
15
+ "lrap": 0.6744075451647296,
16
+ "main_score": 0.4163124335812965,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.39107332624867164,
20
+ "f1": 0.30363975478751165,
21
+ "lrap": 0.6292242295430517
22
+ },
23
+ {
24
+ "accuracy": 0.41764080765143463,
25
+ "f1": 0.36827023404137,
26
+ "lrap": 0.6788522848034118
27
+ },
28
+ {
29
+ "accuracy": 0.44155154091392135,
30
+ "f1": 0.3581088621914663,
31
+ "lrap": 0.6616896918172271
32
+ },
33
+ {
34
+ "accuracy": 0.4075451647183847,
35
+ "f1": 0.3577153139017929,
36
+ "lrap": 0.6602550478214785
37
+ },
38
+ {
39
+ "accuracy": 0.4287991498405951,
40
+ "f1": 0.3577537447403594,
41
+ "lrap": 0.7056323060573967
42
+ },
43
+ {
44
+ "accuracy": 0.4086078639744952,
45
+ "f1": 0.35846507069124245,
46
+ "lrap": 0.6671094580233908
47
+ },
48
+ {
49
+ "accuracy": 0.4303931987247609,
50
+ "f1": 0.3656675334996043,
51
+ "lrap": 0.6865037194474068
52
+ },
53
+ {
54
+ "accuracy": 0.4160467587672689,
55
+ "f1": 0.36562112322113166,
56
+ "lrap": 0.7014346439957595
57
+ },
58
+ {
59
+ "accuracy": 0.4070138150903294,
60
+ "f1": 0.35261598493698315,
61
+ "lrap": 0.6606269925611169
62
+ },
63
+ {
64
+ "accuracy": 0.41445270988310307,
65
+ "f1": 0.3946464181475067,
66
+ "lrap": 0.6927470775770567
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "CEDRClassification"
73
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/CLSClusteringP2P.v2.json ADDED
@@ -0,0 +1,34 @@
1
+ {
2
+ "dataset_revision": "4b6227591c6c1a73bc76b1055f3b7f3588e72476",
3
+ "evaluation_time": 46.09179091453552,
4
+ "kg_co2_emissions": 0.0014140877297600509,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "cmn-Hans"
12
+ ],
13
+ "main_score": 0.36930527026523097,
14
+ "v_measure": 0.36930527026523097,
15
+ "v_measure_std": 0.014725255595727507,
16
+ "v_measures": {
17
+ "Level 0": [
18
+ 0.36958673673692816,
19
+ 0.3832206815239315,
20
+ 0.3576143657502721,
21
+ 0.360431397481673,
22
+ 0.3895035099934079,
23
+ 0.36522666816886235,
24
+ 0.38506839786803965,
25
+ 0.3528414159563135,
26
+ 0.3451062826099033,
27
+ 0.3844532465629788
28
+ ]
29
+ }
30
+ }
31
+ ]
32
+ },
33
+ "task_name": "CLSClusteringP2P.v2"
34
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/CSFDSKMovieReviewSentimentClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "23a20c659d868740ef9c54854de631fe19cd5c17",
3
+ "evaluation_time": 10.673860788345337,
4
+ "kg_co2_emissions": 0.0005239210393028656,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.32275390625,
10
+ "f1": 0.31413504602180276,
11
+ "f1_weighted": 0.31612586900632866,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "slk-Latn"
15
+ ],
16
+ "main_score": 0.32275390625,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.3203125,
20
+ "f1": 0.3121747387810991,
21
+ "f1_weighted": 0.3140242218163977
22
+ },
23
+ {
24
+ "accuracy": 0.3359375,
25
+ "f1": 0.3264614823084104,
26
+ "f1_weighted": 0.32878904459916797
27
+ },
28
+ {
29
+ "accuracy": 0.32666015625,
30
+ "f1": 0.31264281227712215,
31
+ "f1_weighted": 0.3147173280918371
32
+ },
33
+ {
34
+ "accuracy": 0.34326171875,
35
+ "f1": 0.335633260056341,
36
+ "f1_weighted": 0.33766230539610026
37
+ },
38
+ {
39
+ "accuracy": 0.3212890625,
40
+ "f1": 0.3154658640496726,
41
+ "f1_weighted": 0.31732789931353733
42
+ },
43
+ {
44
+ "accuracy": 0.32080078125,
45
+ "f1": 0.30167571874009164,
46
+ "f1_weighted": 0.3040929739529738
47
+ },
48
+ {
49
+ "accuracy": 0.306640625,
50
+ "f1": 0.2963998560399232,
51
+ "f1_weighted": 0.29861088382007556
52
+ },
53
+ {
54
+ "accuracy": 0.328125,
55
+ "f1": 0.3258537718451703,
56
+ "f1_weighted": 0.32777962598802
57
+ },
58
+ {
59
+ "accuracy": 0.3232421875,
60
+ "f1": 0.3191044814744508,
61
+ "f1_weighted": 0.32040379339018465
62
+ },
63
+ {
64
+ "accuracy": 0.30126953125,
65
+ "f1": 0.2959384746457463,
66
+ "f1_weighted": 0.29785061369499205
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "CSFDSKMovieReviewSentimentClassification"
73
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/CTKFactsNLI.json ADDED
@@ -0,0 +1,107 @@
1
+ {
2
+ "dataset_revision": "387ae4582c8054cb52ef57ef0941f19bd8012abf",
3
+ "evaluation_time": 0.7728242874145508,
4
+ "kg_co2_emissions": 3.8751563922414416e-05,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "cosine_accuracy": 0.7653333333333333,
10
+ "cosine_accuracy_threshold": 0.6238183379173279,
11
+ "cosine_ap": 0.8541195652427196,
12
+ "cosine_f1": 0.8508474576271187,
13
+ "cosine_f1_threshold": 0.6238183379173279,
14
+ "cosine_precision": 0.7606060606060606,
15
+ "cosine_recall": 0.9653846153846154,
16
+ "dot_accuracy": 0.7653333333333333,
17
+ "dot_accuracy_threshold": 0.6238184571266174,
18
+ "dot_ap": 0.8541195652427196,
19
+ "dot_f1": 0.8508474576271187,
20
+ "dot_f1_threshold": 0.6238184571266174,
21
+ "dot_precision": 0.7606060606060606,
22
+ "dot_recall": 0.9653846153846154,
23
+ "euclidean_accuracy": 0.7653333333333333,
24
+ "euclidean_accuracy_threshold": 0.8673883080482483,
25
+ "euclidean_ap": 0.8541195652427196,
26
+ "euclidean_f1": 0.8508474576271187,
27
+ "euclidean_f1_threshold": 0.8673883080482483,
28
+ "euclidean_precision": 0.7606060606060606,
29
+ "euclidean_recall": 0.9653846153846154,
30
+ "hf_subset": "default",
31
+ "languages": [
32
+ "ces-Latn"
33
+ ],
34
+ "main_score": 0.8541195652427196,
35
+ "manhattan_accuracy": 0.768,
36
+ "manhattan_accuracy_threshold": 18.687667846679688,
37
+ "manhattan_ap": 0.8527086607837427,
38
+ "manhattan_f1": 0.8522920203735145,
39
+ "manhattan_f1_threshold": 18.687667846679688,
40
+ "manhattan_precision": 0.7629179331306991,
41
+ "manhattan_recall": 0.9653846153846154,
42
+ "max_accuracy": 0.768,
43
+ "max_ap": 0.8541195652427196,
44
+ "max_f1": 0.8522920203735145,
45
+ "max_precision": 0.7629179331306991,
46
+ "max_recall": 0.9653846153846154,
47
+ "similarity_accuracy": 0.7653333333333333,
48
+ "similarity_accuracy_threshold": 0.6238183379173279,
49
+ "similarity_ap": 0.8541195652427196,
50
+ "similarity_f1": 0.8508474576271187,
51
+ "similarity_f1_threshold": 0.6238183379173279,
52
+ "similarity_precision": 0.7606060606060606,
53
+ "similarity_recall": 0.9653846153846154
54
+ }
55
+ ],
56
+ "validation": [
57
+ {
58
+ "cosine_accuracy": 0.6524590163934426,
59
+ "cosine_accuracy_threshold": 0.7035440802574158,
60
+ "cosine_ap": 0.7552777641655737,
61
+ "cosine_f1": 0.7756813417190775,
62
+ "cosine_f1_threshold": 0.5972406268119812,
63
+ "cosine_precision": 0.6468531468531469,
64
+ "cosine_recall": 0.9685863874345549,
65
+ "dot_accuracy": 0.6524590163934426,
66
+ "dot_accuracy_threshold": 0.703544020652771,
67
+ "dot_ap": 0.7552777641655737,
68
+ "dot_f1": 0.7756813417190775,
69
+ "dot_f1_threshold": 0.597240686416626,
70
+ "dot_precision": 0.6468531468531469,
71
+ "dot_recall": 0.9685863874345549,
72
+ "euclidean_accuracy": 0.6524590163934426,
73
+ "euclidean_accuracy_threshold": 0.7700075507164001,
74
+ "euclidean_ap": 0.7552777641655737,
75
+ "euclidean_f1": 0.7756813417190775,
76
+ "euclidean_f1_threshold": 0.8975067138671875,
77
+ "euclidean_precision": 0.6468531468531469,
78
+ "euclidean_recall": 0.9685863874345549,
79
+ "hf_subset": "default",
80
+ "languages": [
81
+ "ces-Latn"
82
+ ],
83
+ "main_score": 0.7574500968292195,
84
+ "manhattan_accuracy": 0.6557377049180327,
85
+ "manhattan_accuracy_threshold": 17.014137268066406,
86
+ "manhattan_ap": 0.7574500968292195,
87
+ "manhattan_f1": 0.7768595041322314,
88
+ "manhattan_f1_threshold": 20.028038024902344,
89
+ "manhattan_precision": 0.6416382252559727,
90
+ "manhattan_recall": 0.9842931937172775,
91
+ "max_accuracy": 0.6557377049180327,
92
+ "max_ap": 0.7574500968292195,
93
+ "max_f1": 0.7768595041322314,
94
+ "max_precision": 0.6468531468531469,
95
+ "max_recall": 0.9842931937172775,
96
+ "similarity_accuracy": 0.6524590163934426,
97
+ "similarity_accuracy_threshold": 0.7035440802574158,
98
+ "similarity_ap": 0.7552777641655737,
99
+ "similarity_f1": 0.7756813417190775,
100
+ "similarity_f1_threshold": 0.5972406268119812,
101
+ "similarity_precision": 0.6468531468531469,
102
+ "similarity_recall": 0.9685863874345549
103
+ }
104
+ ]
105
+ },
106
+ "task_name": "CTKFactsNLI"
107
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/CataloniaTweetClassification.json ADDED
@@ -0,0 +1,261 @@
1
+ {
2
+ "dataset_revision": "cf24d44e517efa534f048e5fc5981f399ed25bee",
3
+ "evaluation_time": 21.169485092163086,
4
+ "kg_co2_emissions": 0.0008908893089622294,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.48665,
10
+ "f1": 0.4958377338135599,
11
+ "f1_weighted": 0.4761199601914624,
12
+ "hf_subset": "spanish",
13
+ "languages": [
14
+ "spa-Latn"
15
+ ],
16
+ "main_score": 0.48665,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.509,
20
+ "f1": 0.5171294595817693,
21
+ "f1_weighted": 0.4984645975330595
22
+ },
23
+ {
24
+ "accuracy": 0.467,
25
+ "f1": 0.4709119591648078,
26
+ "f1_weighted": 0.45672508892506175
27
+ },
28
+ {
29
+ "accuracy": 0.4395,
30
+ "f1": 0.44236364639743925,
31
+ "f1_weighted": 0.42536271408812304
32
+ },
33
+ {
34
+ "accuracy": 0.466,
35
+ "f1": 0.47858994189929444,
36
+ "f1_weighted": 0.4598712396316713
37
+ },
38
+ {
39
+ "accuracy": 0.4735,
40
+ "f1": 0.47670347197707325,
41
+ "f1_weighted": 0.45376216090829014
42
+ },
43
+ {
44
+ "accuracy": 0.5315,
45
+ "f1": 0.5440913884397248,
46
+ "f1_weighted": 0.5290386124644052
47
+ },
48
+ {
49
+ "accuracy": 0.5075,
50
+ "f1": 0.5269967284792475,
51
+ "f1_weighted": 0.49822557852792193
52
+ },
53
+ {
54
+ "accuracy": 0.498,
55
+ "f1": 0.4950640793779743,
56
+ "f1_weighted": 0.4776867308535272
57
+ },
58
+ {
59
+ "accuracy": 0.4575,
60
+ "f1": 0.46209181337716093,
61
+ "f1_weighted": 0.4495457719344183
62
+ },
63
+ {
64
+ "accuracy": 0.517,
65
+ "f1": 0.5444348494411072,
66
+ "f1_weighted": 0.5125171070481458
67
+ }
68
+ ]
69
+ },
70
+ {
71
+ "accuracy": 0.4936,
72
+ "f1": 0.486429668998506,
73
+ "f1_weighted": 0.4919859829175838,
74
+ "hf_subset": "catalan",
75
+ "languages": [
76
+ "cat-Latn"
77
+ ],
78
+ "main_score": 0.4936,
79
+ "scores_per_experiment": [
80
+ {
81
+ "accuracy": 0.542,
82
+ "f1": 0.5331811774938978,
83
+ "f1_weighted": 0.543724102971383
84
+ },
85
+ {
86
+ "accuracy": 0.4025,
87
+ "f1": 0.40529113697555824,
88
+ "f1_weighted": 0.4017634142079278
89
+ },
90
+ {
91
+ "accuracy": 0.5405,
92
+ "f1": 0.5367196130458485,
93
+ "f1_weighted": 0.5409588971072757
94
+ },
95
+ {
96
+ "accuracy": 0.496,
97
+ "f1": 0.4869654884942592,
98
+ "f1_weighted": 0.49486890778102655
99
+ },
100
+ {
101
+ "accuracy": 0.459,
102
+ "f1": 0.41710963632664505,
103
+ "f1_weighted": 0.4413548141330438
104
+ },
105
+ {
106
+ "accuracy": 0.5225,
107
+ "f1": 0.5195937938391677,
108
+ "f1_weighted": 0.5213124890678718
109
+ },
110
+ {
111
+ "accuracy": 0.5125,
112
+ "f1": 0.505487096844393,
113
+ "f1_weighted": 0.5118868465707199
114
+ },
115
+ {
116
+ "accuracy": 0.49,
117
+ "f1": 0.4918300206773645,
118
+ "f1_weighted": 0.4896146982495454
119
+ },
120
+ {
121
+ "accuracy": 0.5265,
122
+ "f1": 0.5237608076806496,
123
+ "f1_weighted": 0.5281354456200846
124
+ },
125
+ {
126
+ "accuracy": 0.4445,
127
+ "f1": 0.4443579186072764,
128
+ "f1_weighted": 0.44624021346695963
129
+ }
130
+ ]
131
+ }
132
+ ],
133
+ "validation": [
134
+ {
135
+ "accuracy": 0.48599999999999993,
136
+ "f1": 0.4943007866065982,
137
+ "f1_weighted": 0.47587347955726367,
138
+ "hf_subset": "spanish",
139
+ "languages": [
140
+ "spa-Latn"
141
+ ],
142
+ "main_score": 0.48599999999999993,
143
+ "scores_per_experiment": [
144
+ {
145
+ "accuracy": 0.506,
146
+ "f1": 0.5144195742850902,
147
+ "f1_weighted": 0.4934934087501329
148
+ },
149
+ {
150
+ "accuracy": 0.4885,
151
+ "f1": 0.49242247823988344,
152
+ "f1_weighted": 0.482210230691021
153
+ },
154
+ {
155
+ "accuracy": 0.4375,
156
+ "f1": 0.44107385296011276,
157
+ "f1_weighted": 0.42236570706641396
158
+ },
159
+ {
160
+ "accuracy": 0.475,
161
+ "f1": 0.48661855215958777,
162
+ "f1_weighted": 0.4684221925654096
163
+ },
164
+ {
165
+ "accuracy": 0.466,
166
+ "f1": 0.47353712774040385,
167
+ "f1_weighted": 0.44852436386289873
168
+ },
169
+ {
170
+ "accuracy": 0.5205,
171
+ "f1": 0.5361810123664744,
172
+ "f1_weighted": 0.5182463173699038
173
+ },
174
+ {
175
+ "accuracy": 0.5005,
176
+ "f1": 0.518881602748479,
177
+ "f1_weighted": 0.4926132503789786
178
+ },
179
+ {
180
+ "accuracy": 0.496,
181
+ "f1": 0.48242164496261025,
182
+ "f1_weighted": 0.4732929378680793
183
+ },
184
+ {
185
+ "accuracy": 0.4485,
186
+ "f1": 0.45694868138516426,
187
+ "f1_weighted": 0.4411090404291859
188
+ },
189
+ {
190
+ "accuracy": 0.5215,
191
+ "f1": 0.5405033392181767,
192
+ "f1_weighted": 0.5184573465906128
193
+ }
194
+ ]
195
+ },
196
+ {
197
+ "accuracy": 0.48405000000000004,
198
+ "f1": 0.47698576171232016,
199
+ "f1_weighted": 0.48345715472759193,
200
+ "hf_subset": "catalan",
201
+ "languages": [
202
+ "cat-Latn"
203
+ ],
204
+ "main_score": 0.48405000000000004,
205
+ "scores_per_experiment": [
206
+ {
207
+ "accuracy": 0.5275,
208
+ "f1": 0.5224450835792357,
209
+ "f1_weighted": 0.5287394275589593
210
+ },
211
+ {
212
+ "accuracy": 0.415,
213
+ "f1": 0.41530990598248746,
214
+ "f1_weighted": 0.41520630693643934
215
+ },
216
+ {
217
+ "accuracy": 0.545,
218
+ "f1": 0.5409342961982029,
219
+ "f1_weighted": 0.5479930779360997
220
+ },
221
+ {
222
+ "accuracy": 0.506,
223
+ "f1": 0.494813132774021,
224
+ "f1_weighted": 0.5052477146150051
225
+ },
226
+ {
227
+ "accuracy": 0.4445,
228
+ "f1": 0.4056601546977712,
229
+ "f1_weighted": 0.42981917443523515
230
+ },
231
+ {
232
+ "accuracy": 0.4995,
233
+ "f1": 0.49880747719889706,
234
+ "f1_weighted": 0.4993734428549102
235
+ },
236
+ {
237
+ "accuracy": 0.4885,
238
+ "f1": 0.4802217917801243,
239
+ "f1_weighted": 0.4900506551642571
240
+ },
241
+ {
242
+ "accuracy": 0.4685,
243
+ "f1": 0.47114064563977137,
244
+ "f1_weighted": 0.4680776528938817
245
+ },
246
+ {
247
+ "accuracy": 0.5235,
248
+ "f1": 0.5178948313226949,
249
+ "f1_weighted": 0.5271407828603804
250
+ },
251
+ {
252
+ "accuracy": 0.4225,
253
+ "f1": 0.42263029794999524,
254
+ "f1_weighted": 0.4229233120207512
255
+ }
256
+ ]
257
+ }
258
+ ]
259
+ },
260
+ "task_name": "CataloniaTweetClassification"
261
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/ClimateFEVERHardNegatives.json ADDED
@@ -0,0 +1,158 @@
1
+ {
2
+ "dataset_revision": "3a309e201f3c2c4b13bd4a367a8f37eee2ec1d21",
3
+ "evaluation_time": 118.1490204334259,
4
+ "kg_co2_emissions": 0.011016943125238534,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "eng-Latn"
12
+ ],
13
+ "main_score": 0.35965,
14
+ "map_at_1": 0.16005,
15
+ "map_at_10": 0.2683,
16
+ "map_at_100": 0.28972,
17
+ "map_at_1000": 0.29179,
18
+ "map_at_20": 0.27983,
19
+ "map_at_3": 0.22516,
20
+ "map_at_5": 0.24947,
21
+ "mrr_at_1": 0.364,
22
+ "mrr_at_10": 0.4737849206349203,
23
+ "mrr_at_100": 0.4828597907878159,
24
+ "mrr_at_1000": 0.483077217955487,
25
+ "mrr_at_20": 0.47950173816981034,
26
+ "mrr_at_3": 0.43966666666666643,
27
+ "mrr_at_5": 0.46131666666666604,
28
+ "nauc_map_at_1000_diff1": 0.3160780342713915,
29
+ "nauc_map_at_1000_max": 0.39679753963078607,
30
+ "nauc_map_at_1000_std": 0.18258880968533445,
31
+ "nauc_map_at_100_diff1": 0.3158312419572046,
32
+ "nauc_map_at_100_max": 0.39699952923880544,
33
+ "nauc_map_at_100_std": 0.18251534834405753,
34
+ "nauc_map_at_10_diff1": 0.31980868848864685,
35
+ "nauc_map_at_10_max": 0.39380370175456836,
36
+ "nauc_map_at_10_std": 0.17252435264274638,
37
+ "nauc_map_at_1_diff1": 0.3908121475494815,
38
+ "nauc_map_at_1_max": 0.3405400235356674,
39
+ "nauc_map_at_1_std": 0.1179524323938485,
40
+ "nauc_map_at_20_diff1": 0.31818326967792065,
41
+ "nauc_map_at_20_max": 0.39478525739961967,
42
+ "nauc_map_at_20_std": 0.17755712954140637,
43
+ "nauc_map_at_3_diff1": 0.3233936923119557,
44
+ "nauc_map_at_3_max": 0.3685234283152992,
45
+ "nauc_map_at_3_std": 0.13824156699144302,
46
+ "nauc_map_at_5_diff1": 0.32919103815409695,
47
+ "nauc_map_at_5_max": 0.3881890649815373,
48
+ "nauc_map_at_5_std": 0.1589036850398479,
49
+ "nauc_mrr_at_1000_diff1": 0.29844961294864714,
50
+ "nauc_mrr_at_1000_max": 0.37333611168665953,
51
+ "nauc_mrr_at_1000_std": 0.2047013659203655,
52
+ "nauc_mrr_at_100_diff1": 0.29838889123074763,
53
+ "nauc_mrr_at_100_max": 0.3734007153780295,
54
+ "nauc_mrr_at_100_std": 0.20485709334648444,
55
+ "nauc_mrr_at_10_diff1": 0.2968028832316489,
56
+ "nauc_mrr_at_10_max": 0.3733766108650763,
57
+ "nauc_mrr_at_10_std": 0.2062009812878929,
58
+ "nauc_mrr_at_1_diff1": 0.3356914985568133,
59
+ "nauc_mrr_at_1_max": 0.35231040823061655,
60
+ "nauc_mrr_at_1_std": 0.17863167048198766,
61
+ "nauc_mrr_at_20_diff1": 0.29742403888676966,
62
+ "nauc_mrr_at_20_max": 0.3728782934910517,
63
+ "nauc_mrr_at_20_std": 0.20429617202585656,
64
+ "nauc_mrr_at_3_diff1": 0.29493488947915586,
65
+ "nauc_mrr_at_3_max": 0.37038462205737055,
66
+ "nauc_mrr_at_3_std": 0.1960873632696855,
67
+ "nauc_mrr_at_5_diff1": 0.29845649558800996,
68
+ "nauc_mrr_at_5_max": 0.3733010674594404,
69
+ "nauc_mrr_at_5_std": 0.20244040553182904,
70
+ "nauc_ndcg_at_1000_diff1": 0.2947526983521663,
71
+ "nauc_ndcg_at_1000_max": 0.4049046952986035,
72
+ "nauc_ndcg_at_1000_std": 0.22598137896459156,
73
+ "nauc_ndcg_at_100_diff1": 0.2892959393475447,
74
+ "nauc_ndcg_at_100_max": 0.409479584414591,
75
+ "nauc_ndcg_at_100_std": 0.2302582542606591,
76
+ "nauc_ndcg_at_10_diff1": 0.2969183197304918,
77
+ "nauc_ndcg_at_10_max": 0.3976397432239702,
78
+ "nauc_ndcg_at_10_std": 0.20093166700813117,
79
+ "nauc_ndcg_at_1_diff1": 0.3356914985568133,
80
+ "nauc_ndcg_at_1_max": 0.35231040823061655,
81
+ "nauc_ndcg_at_1_std": 0.17863167048198766,
82
+ "nauc_ndcg_at_20_diff1": 0.2941140050842346,
83
+ "nauc_ndcg_at_20_max": 0.39929847376622474,
84
+ "nauc_ndcg_at_20_std": 0.20953583503071954,
85
+ "nauc_ndcg_at_3_diff1": 0.297241921766262,
86
+ "nauc_ndcg_at_3_max": 0.36497681162122814,
87
+ "nauc_ndcg_at_3_std": 0.16160099422415364,
88
+ "nauc_ndcg_at_5_diff1": 0.3123809649232694,
89
+ "nauc_ndcg_at_5_max": 0.3919809444148372,
90
+ "nauc_ndcg_at_5_std": 0.181147182466673,
91
+ "nauc_precision_at_1000_diff1": -0.07820361030248468,
92
+ "nauc_precision_at_1000_max": 0.03704168667079107,
93
+ "nauc_precision_at_1000_std": 0.1587026611351975,
94
+ "nauc_precision_at_100_diff1": -0.0035049308567726995,
95
+ "nauc_precision_at_100_max": 0.20736415433149413,
96
+ "nauc_precision_at_100_std": 0.26909829389987894,
97
+ "nauc_precision_at_10_diff1": 0.12251350162308011,
98
+ "nauc_precision_at_10_max": 0.31850782887606605,
99
+ "nauc_precision_at_10_std": 0.24527353039332286,
100
+ "nauc_precision_at_1_diff1": 0.3356914985568133,
101
+ "nauc_precision_at_1_max": 0.35231040823061655,
102
+ "nauc_precision_at_1_std": 0.17863167048198766,
103
+ "nauc_precision_at_20_diff1": 0.07955594197527795,
104
+ "nauc_precision_at_20_max": 0.2786872102594437,
105
+ "nauc_precision_at_20_std": 0.2514107685199376,
106
+ "nauc_precision_at_3_diff1": 0.19275818010875823,
107
+ "nauc_precision_at_3_max": 0.35352000385716115,
108
+ "nauc_precision_at_3_std": 0.1887451828535416,
109
+ "nauc_precision_at_5_diff1": 0.181314015932855,
110
+ "nauc_precision_at_5_max": 0.3430779808900593,
111
+ "nauc_precision_at_5_std": 0.2178482940939375,
112
+ "nauc_recall_at_1000_diff1": 0.15799330057903438,
113
+ "nauc_recall_at_1000_max": 0.3371133598741174,
114
+ "nauc_recall_at_1000_std": 0.3239984150553473,
115
+ "nauc_recall_at_100_diff1": 0.16493978049488822,
116
+ "nauc_recall_at_100_max": 0.359305298221647,
117
+ "nauc_recall_at_100_std": 0.2847804247777974,
118
+ "nauc_recall_at_10_diff1": 0.22900241790134798,
119
+ "nauc_recall_at_10_max": 0.35971647236750903,
120
+ "nauc_recall_at_10_std": 0.20137819873906618,
121
+ "nauc_recall_at_1_diff1": 0.3908121475494815,
122
+ "nauc_recall_at_1_max": 0.3405400235356674,
123
+ "nauc_recall_at_1_std": 0.1179524323938485,
124
+ "nauc_recall_at_20_diff1": 0.21387743545608776,
125
+ "nauc_recall_at_20_max": 0.34722353277244866,
126
+ "nauc_recall_at_20_std": 0.21052049538153708,
127
+ "nauc_recall_at_3_diff1": 0.2696240672790563,
128
+ "nauc_recall_at_3_max": 0.3468303440337917,
129
+ "nauc_recall_at_3_std": 0.13420622638800417,
130
+ "nauc_recall_at_5_diff1": 0.2728939572377652,
131
+ "nauc_recall_at_5_max": 0.3667879785969204,
132
+ "nauc_recall_at_5_std": 0.17061920955432983,
133
+ "ndcg_at_1": 0.364,
134
+ "ndcg_at_10": 0.35965,
135
+ "ndcg_at_100": 0.43929,
136
+ "ndcg_at_1000": 0.47265,
137
+ "ndcg_at_20": 0.38994,
138
+ "ndcg_at_3": 0.2996,
139
+ "ndcg_at_5": 0.32376,
140
+ "precision_at_1": 0.364,
141
+ "precision_at_10": 0.1091,
142
+ "precision_at_100": 0.01961,
143
+ "precision_at_1000": 0.0026,
144
+ "precision_at_20": 0.06765,
145
+ "precision_at_3": 0.21867,
146
+ "precision_at_5": 0.1704,
147
+ "recall_at_1": 0.16005,
148
+ "recall_at_10": 0.41642,
149
+ "recall_at_100": 0.68748,
150
+ "recall_at_1000": 0.86965,
151
+ "recall_at_20": 0.50007,
152
+ "recall_at_3": 0.26745,
153
+ "recall_at_5": 0.3373
154
+ }
155
+ ]
156
+ },
157
+ "task_name": "ClimateFEVERHardNegatives"
158
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/Core17InstructionRetrieval.json ADDED
@@ -0,0 +1,137 @@
1
+ {
2
+ "dataset_revision": "e39ff896cf3efbbdeeb950e6bd7c79f266995b07",
3
+ "evaluation_time": 156.9366044998169,
4
+ "kg_co2_emissions": 0.014094752956091024,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "individual": {
11
+ "changed": {
12
+ "map_at_1": 0.01393,
13
+ "map_at_10": 0.07602,
14
+ "map_at_100": 0.20691,
15
+ "map_at_1000": 0.27405,
16
+ "map_at_20": 0.12029,
17
+ "map_at_3": 0.03447,
18
+ "map_at_5": 0.04765,
19
+ "mrr_at_1": 0.55,
20
+ "mrr_at_10": 0.6268650793650794,
21
+ "mrr_at_100": 0.6300848936065655,
22
+ "mrr_at_1000": 0.6300848936065655,
23
+ "mrr_at_20": 0.6294966583124478,
24
+ "mrr_at_3": 0.575,
25
+ "mrr_at_5": 0.5974999999999999,
26
+ "naucs_at_1000_diff1": 0.5816601395794363,
27
+ "naucs_at_1000_max": 0.3576046311926546,
28
+ "naucs_at_1000_std": 0.16551544938254256,
29
+ "naucs_at_100_diff1": 0.5816601395794363,
30
+ "naucs_at_100_max": 0.3576046311926546,
31
+ "naucs_at_100_std": 0.16551544938254256,
32
+ "naucs_at_10_diff1": 0.5833511586613306,
33
+ "naucs_at_10_max": 0.359197396406324,
34
+ "naucs_at_10_std": 0.16123268109895256,
35
+ "naucs_at_1_diff1": 0.6203749210138315,
36
+ "naucs_at_1_max": 0.40834093940883215,
37
+ "naucs_at_1_std": 0.2413115214491331,
38
+ "naucs_at_20_diff1": 0.5807141592636915,
39
+ "naucs_at_20_max": 0.3544931303019406,
40
+ "naucs_at_20_std": 0.16158728987037074,
41
+ "naucs_at_3_diff1": 0.5817309426747,
42
+ "naucs_at_3_max": 0.3919694072657743,
43
+ "naucs_at_3_std": 0.22031819329701688,
44
+ "naucs_at_5_diff1": 0.554833456532099,
45
+ "naucs_at_5_max": 0.3856108845171051,
46
+ "naucs_at_5_std": 0.21081247396878935,
47
+ "ndcg_at_1": 0.4,
48
+ "ndcg_at_10": 0.33556,
49
+ "ndcg_at_100": 0.39138,
50
+ "ndcg_at_1000": 0.636,
51
+ "ndcg_at_20": 0.34091,
52
+ "ndcg_at_3": 0.37654,
53
+ "ndcg_at_5": 0.34965,
54
+ "precision_at_1": 0.55,
55
+ "precision_at_10": 0.395,
56
+ "precision_at_100": 0.2125,
57
+ "precision_at_1000": 0.0545,
58
+ "precision_at_20": 0.37,
59
+ "precision_at_3": 0.46667,
60
+ "precision_at_5": 0.43,
61
+ "recall_at_1": 0.01393,
62
+ "recall_at_10": 0.09133,
63
+ "recall_at_100": 0.39973,
64
+ "recall_at_1000": 1.0,
65
+ "recall_at_20": 0.16249,
66
+ "recall_at_3": 0.0353,
67
+ "recall_at_5": 0.05158
68
+ },
69
+ "original": {
70
+ "map_at_1": 0.00223,
71
+ "map_at_10": 0.03398,
72
+ "map_at_100": 0.10924,
73
+ "map_at_1000": 0.14931,
74
+ "map_at_20": 0.05826,
75
+ "map_at_3": 0.01125,
76
+ "map_at_5": 0.01692,
77
+ "mrr_at_1": 0.1,
78
+ "mrr_at_10": 0.2554166666666667,
79
+ "mrr_at_100": 0.2753917203486169,
80
+ "mrr_at_1000": 0.27579172034861693,
81
+ "mrr_at_20": 0.27366758241758243,
82
+ "mrr_at_3": 0.2,
83
+ "mrr_at_5": 0.2325,
84
+ "naucs_at_1000_diff1": -0.16665355438395124,
85
+ "naucs_at_1000_max": 0.40578304639787394,
86
+ "naucs_at_1000_std": 0.2798194842113217,
87
+ "naucs_at_100_diff1": -0.1648384375544595,
88
+ "naucs_at_100_max": 0.40477184642167646,
89
+ "naucs_at_100_std": 0.2762805151042291,
90
+ "naucs_at_10_diff1": -0.17582955768375,
91
+ "naucs_at_10_max": 0.39799993768280695,
92
+ "naucs_at_10_std": 0.2971339683828128,
93
+ "naucs_at_1_diff1": -0.2005634558626242,
94
+ "naucs_at_1_max": 0.6619264824255434,
95
+ "naucs_at_1_std": 0.4928897236383149,
96
+ "naucs_at_20_diff1": -0.15610706907487606,
97
+ "naucs_at_20_max": 0.4026581169289708,
98
+ "naucs_at_20_std": 0.2655764873812179,
99
+ "naucs_at_3_diff1": -0.22317477876106215,
100
+ "naucs_at_3_max": 0.37410889872173064,
101
+ "naucs_at_3_std": 0.3784568584070797,
102
+ "naucs_at_5_diff1": -0.21209464167870404,
103
+ "naucs_at_5_max": 0.4150041610711414,
104
+ "naucs_at_5_std": 0.33142124038377235,
105
+ "ndcg_at_1": 0.05,
106
+ "ndcg_at_10": 0.13377,
107
+ "ndcg_at_100": 0.26608,
108
+ "ndcg_at_1000": 0.48251,
109
+ "ndcg_at_20": 0.16969,
110
+ "ndcg_at_3": 0.08417,
111
+ "ndcg_at_5": 0.10716,
112
+ "precision_at_1": 0.1,
113
+ "precision_at_10": 0.195,
114
+ "precision_at_100": 0.1225,
115
+ "precision_at_1000": 0.0327,
116
+ "precision_at_20": 0.1925,
117
+ "precision_at_3": 0.15,
118
+ "precision_at_5": 0.18,
119
+ "recall_at_1": 0.00223,
120
+ "recall_at_10": 0.07176,
121
+ "recall_at_100": 0.36603,
122
+ "recall_at_1000": 1.0,
123
+ "recall_at_20": 0.13982,
124
+ "recall_at_3": 0.01994,
125
+ "recall_at_5": 0.03357
126
+ }
127
+ },
128
+ "languages": [
129
+ "eng-Latn"
130
+ ],
131
+ "main_score": 0.02019801127002012,
132
+ "p-MRR": 0.02019801127002012
133
+ }
134
+ ]
135
+ },
136
+ "task_name": "Core17InstructionRetrieval"
137
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/CovidRetrieval.json ADDED
@@ -0,0 +1,158 @@
1
+ {
2
+ "dataset_revision": "1271c7809071a13532e05f25fb53511ffce77117",
3
+ "evaluation_time": 199.672771692276,
4
+ "kg_co2_emissions": 0.01869778296849748,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "dev": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "cmn-Hans"
12
+ ],
13
+ "main_score": 0.80614,
14
+ "map_at_1": 0.68124,
15
+ "map_at_10": 0.76718,
16
+ "map_at_100": 0.77055,
17
+ "map_at_1000": 0.77058,
18
+ "map_at_20": 0.76963,
19
+ "map_at_3": 0.74982,
20
+ "map_at_5": 0.76015,
21
+ "mrr_at_1": 0.6838777660695469,
22
+ "mrr_at_10": 0.768390285513573,
23
+ "mrr_at_100": 0.7716926855765665,
24
+ "mrr_at_1000": 0.7717294499768402,
25
+ "mrr_at_20": 0.7707835600848288,
26
+ "mrr_at_3": 0.7518440463645945,
27
+ "mrr_at_5": 0.7618018967334036,
28
+ "nauc_map_at_1000_diff1": 0.778267108085784,
29
+ "nauc_map_at_1000_max": 0.4012233579068954,
30
+ "nauc_map_at_1000_std": -0.518336932581611,
31
+ "nauc_map_at_100_diff1": 0.7782582050892276,
32
+ "nauc_map_at_100_max": 0.40128034837709453,
33
+ "nauc_map_at_100_std": -0.5182881361391041,
34
+ "nauc_map_at_10_diff1": 0.7760850502322932,
35
+ "nauc_map_at_10_max": 0.4010120327489881,
36
+ "nauc_map_at_10_std": -0.5206049966851635,
37
+ "nauc_map_at_1_diff1": 0.7999247339106762,
38
+ "nauc_map_at_1_max": 0.38854047323812596,
39
+ "nauc_map_at_1_std": -0.4884811824173493,
40
+ "nauc_map_at_20_diff1": 0.7779759532516435,
41
+ "nauc_map_at_20_max": 0.40218343379048327,
42
+ "nauc_map_at_20_std": -0.5186879666099741,
43
+ "nauc_map_at_3_diff1": 0.7780747456582744,
44
+ "nauc_map_at_3_max": 0.39533430519664137,
45
+ "nauc_map_at_3_std": -0.5315720206600631,
46
+ "nauc_map_at_5_diff1": 0.777239456890133,
47
+ "nauc_map_at_5_max": 0.399467714510124,
48
+ "nauc_map_at_5_std": -0.5317314318533847,
49
+ "nauc_mrr_at_1000_diff1": 0.7767428051313837,
50
+ "nauc_mrr_at_1000_max": 0.4052898342650751,
51
+ "nauc_mrr_at_1000_std": -0.5126791085405565,
52
+ "nauc_mrr_at_100_diff1": 0.7767341544505314,
53
+ "nauc_mrr_at_100_max": 0.40534625237264327,
54
+ "nauc_mrr_at_100_std": -0.51263120088577,
55
+ "nauc_mrr_at_10_diff1": 0.7745445168603503,
56
+ "nauc_mrr_at_10_max": 0.40539497196679214,
57
+ "nauc_mrr_at_10_std": -0.5146566634705355,
58
+ "nauc_mrr_at_1_diff1": 0.7962072804777002,
59
+ "nauc_mrr_at_1_max": 0.39552863764649965,
60
+ "nauc_mrr_at_1_std": -0.47945405244171196,
61
+ "nauc_mrr_at_20_diff1": 0.7764588978949131,
62
+ "nauc_mrr_at_20_max": 0.40622330490381553,
63
+ "nauc_mrr_at_20_std": -0.5130842204932701,
64
+ "nauc_mrr_at_3_diff1": 0.775265200607449,
65
+ "nauc_mrr_at_3_max": 0.40048541738274496,
66
+ "nauc_mrr_at_3_std": -0.5222187568853051,
67
+ "nauc_mrr_at_5_diff1": 0.7752210113632552,
68
+ "nauc_mrr_at_5_max": 0.4038942368875064,
69
+ "nauc_mrr_at_5_std": -0.5244116313016823,
70
+ "nauc_ndcg_at_1000_diff1": 0.7743977216737538,
71
+ "nauc_ndcg_at_1000_max": 0.40910840321023584,
72
+ "nauc_ndcg_at_1000_std": -0.5145762412500857,
73
+ "nauc_ndcg_at_100_diff1": 0.7743523742613972,
74
+ "nauc_ndcg_at_100_max": 0.4111150035554155,
75
+ "nauc_ndcg_at_100_std": -0.5130932184538513,
76
+ "nauc_ndcg_at_10_diff1": 0.7652411201085086,
77
+ "nauc_ndcg_at_10_max": 0.41412145734860245,
78
+ "nauc_ndcg_at_10_std": -0.5214651084861199,
79
+ "nauc_ndcg_at_1_diff1": 0.7962072804777002,
80
+ "nauc_ndcg_at_1_max": 0.39552863764649965,
81
+ "nauc_ndcg_at_1_std": -0.47945405244171196,
82
+ "nauc_ndcg_at_20_diff1": 0.772528039429913,
83
+ "nauc_ndcg_at_20_max": 0.419077232025346,
84
+ "nauc_ndcg_at_20_std": -0.5141935059169217,
85
+ "nauc_ndcg_at_3_diff1": 0.7694070969441097,
86
+ "nauc_ndcg_at_3_max": 0.3987895955956775,
87
+ "nauc_ndcg_at_3_std": -0.5468574078007424,
88
+ "nauc_ndcg_at_5_diff1": 0.7678569818248085,
89
+ "nauc_ndcg_at_5_max": 0.4082148568144639,
90
+ "nauc_ndcg_at_5_std": -0.5491911025895029,
91
+ "nauc_precision_at_1000_diff1": -0.3291159668533681,
92
+ "nauc_precision_at_1000_max": 0.21895893458735813,
93
+ "nauc_precision_at_1000_std": 0.6241822645159126,
94
+ "nauc_precision_at_100_diff1": -0.04636341419225461,
95
+ "nauc_precision_at_100_max": 0.35538985547990604,
96
+ "nauc_precision_at_100_std": 0.39711468603747496,
97
+ "nauc_precision_at_10_diff1": 0.4820370334265886,
98
+ "nauc_precision_at_10_max": 0.46324868094472144,
99
+ "nauc_precision_at_10_std": -0.29464421994118095,
100
+ "nauc_precision_at_1_diff1": 0.7962072804777002,
101
+ "nauc_precision_at_1_max": 0.39552863764649965,
102
+ "nauc_precision_at_1_std": -0.47945405244171196,
103
+ "nauc_precision_at_20_diff1": 0.38246595867875155,
104
+ "nauc_precision_at_20_max": 0.5337526896965299,
105
+ "nauc_precision_at_20_std": -0.07044575710657511,
106
+ "nauc_precision_at_3_diff1": 0.7045209700996767,
107
+ "nauc_precision_at_3_max": 0.41493958189230207,
108
+ "nauc_precision_at_3_std": -0.5773196947503446,
109
+ "nauc_precision_at_5_diff1": 0.6305685594397115,
110
+ "nauc_precision_at_5_max": 0.4406858191418746,
111
+ "nauc_precision_at_5_std": -0.5320264094491101,
112
+ "nauc_recall_at_1000_diff1": 0.6640093437842047,
113
+ "nauc_recall_at_1000_max": 0.8638113013192003,
114
+ "nauc_recall_at_1000_std": 0.27366075802901835,
115
+ "nauc_recall_at_100_diff1": 0.7430043421467465,
116
+ "nauc_recall_at_100_max": 0.7859655104181216,
117
+ "nauc_recall_at_100_std": -0.10269021766268517,
118
+ "nauc_recall_at_10_diff1": 0.6829109704816672,
119
+ "nauc_recall_at_10_max": 0.5187617659231504,
120
+ "nauc_recall_at_10_std": -0.5213171525948888,
121
+ "nauc_recall_at_1_diff1": 0.7999247339106762,
122
+ "nauc_recall_at_1_max": 0.38854047323812596,
123
+ "nauc_recall_at_1_std": -0.4884811824173493,
124
+ "nauc_recall_at_20_diff1": 0.7314640605497832,
125
+ "nauc_recall_at_20_max": 0.6707043886329147,
126
+ "nauc_recall_at_20_std": -0.42049366273882716,
127
+ "nauc_recall_at_3_diff1": 0.7368380042000338,
128
+ "nauc_recall_at_3_max": 0.40437013917969117,
129
+ "nauc_recall_at_3_std": -0.6191194885492559,
130
+ "nauc_recall_at_5_diff1": 0.723604419211838,
131
+ "nauc_recall_at_5_max": 0.44633523137217684,
132
+ "nauc_recall_at_5_std": -0.6495149529461623,
133
+ "ndcg_at_1": 0.68388,
134
+ "ndcg_at_10": 0.80614,
135
+ "ndcg_at_100": 0.82061,
136
+ "ndcg_at_1000": 0.82156,
137
+ "ndcg_at_20": 0.81453,
138
+ "ndcg_at_3": 0.77157,
139
+ "ndcg_at_5": 0.78958,
140
+ "precision_at_1": 0.68388,
141
+ "precision_at_10": 0.09347,
142
+ "precision_at_100": 0.01,
143
+ "precision_at_1000": 0.00101,
144
+ "precision_at_20": 0.04837,
145
+ "precision_at_3": 0.27889,
146
+ "precision_at_5": 0.1764,
147
+ "recall_at_1": 0.68124,
148
+ "recall_at_10": 0.92571,
149
+ "recall_at_100": 0.98946,
150
+ "recall_at_1000": 0.99684,
151
+ "recall_at_20": 0.95785,
152
+ "recall_at_3": 0.83219,
153
+ "recall_at_5": 0.87487
154
+ }
155
+ ]
156
+ },
157
+ "task_name": "CovidRetrieval"
158
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/CyrillicTurkicLangClassification.json ADDED
@@ -0,0 +1,81 @@
1
+ {
2
+ "dataset_revision": "e42d330f33d65b7b72dfd408883daf1661f06f18",
3
+ "evaluation_time": 8.270632028579712,
4
+ "kg_co2_emissions": 0.00028926588735390116,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.327734375,
10
+ "f1": 0.31597001082760834,
11
+ "f1_weighted": 0.3160783369073049,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "bak-Cyrl",
15
+ "chv-Cyrl",
16
+ "tat-Cyrl",
17
+ "kir-Cyrl",
18
+ "rus-Cyrl",
19
+ "kaz-Cyrl",
20
+ "tyv-Cyrl",
21
+ "krc-Cyrl",
22
+ "sah-Cyrl"
23
+ ],
24
+ "main_score": 0.327734375,
25
+ "scores_per_experiment": [
26
+ {
27
+ "accuracy": 0.34375,
28
+ "f1": 0.33381058944971453,
29
+ "f1_weighted": 0.33394796930416304
30
+ },
31
+ {
32
+ "accuracy": 0.31884765625,
33
+ "f1": 0.31074979792082913,
34
+ "f1_weighted": 0.3107701687233645
35
+ },
36
+ {
37
+ "accuracy": 0.31201171875,
38
+ "f1": 0.3064413690444711,
39
+ "f1_weighted": 0.306586950891348
40
+ },
41
+ {
42
+ "accuracy": 0.33203125,
43
+ "f1": 0.31520077435596283,
44
+ "f1_weighted": 0.3153288164620823
45
+ },
46
+ {
47
+ "accuracy": 0.33935546875,
48
+ "f1": 0.3245759863637738,
49
+ "f1_weighted": 0.3246692129166081
50
+ },
51
+ {
52
+ "accuracy": 0.3408203125,
53
+ "f1": 0.333239539502051,
54
+ "f1_weighted": 0.33333350265129247
55
+ },
56
+ {
57
+ "accuracy": 0.28271484375,
58
+ "f1": 0.27531579419021973,
59
+ "f1_weighted": 0.2754032153966589
60
+ },
61
+ {
62
+ "accuracy": 0.32763671875,
63
+ "f1": 0.299473279312741,
64
+ "f1_weighted": 0.29961576629842523
65
+ },
66
+ {
67
+ "accuracy": 0.33154296875,
68
+ "f1": 0.3297883363232167,
69
+ "f1_weighted": 0.32989975530671745
70
+ },
71
+ {
72
+ "accuracy": 0.3486328125,
73
+ "f1": 0.3311046418131036,
74
+ "f1_weighted": 0.33122801112238925
75
+ }
76
+ ]
77
+ }
78
+ ]
79
+ },
80
+ "task_name": "CyrillicTurkicLangClassification"
81
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/CzechProductReviewSentimentClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "2e6fedf42c9c104e83dfd95c3a453721e683e244",
3
+ "evaluation_time": 7.736215353012085,
4
+ "kg_co2_emissions": 0.000288723686821838,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.5529296875,
10
+ "f1": 0.5413211523837085,
11
+ "f1_weighted": 0.541274292318336,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "ces-Latn"
15
+ ],
16
+ "main_score": 0.5529296875,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.56298828125,
20
+ "f1": 0.5585594821833989,
21
+ "f1_weighted": 0.5585091181577239
22
+ },
23
+ {
24
+ "accuracy": 0.51318359375,
25
+ "f1": 0.506762207636719,
26
+ "f1_weighted": 0.5067147539449297
27
+ },
28
+ {
29
+ "accuracy": 0.57373046875,
30
+ "f1": 0.5515681521569772,
31
+ "f1_weighted": 0.5515060369905604
32
+ },
33
+ {
34
+ "accuracy": 0.50146484375,
35
+ "f1": 0.48605232136815113,
36
+ "f1_weighted": 0.48603930710581
37
+ },
38
+ {
39
+ "accuracy": 0.54150390625,
40
+ "f1": 0.5337642479402185,
41
+ "f1_weighted": 0.5337256969074715
42
+ },
43
+ {
44
+ "accuracy": 0.55908203125,
45
+ "f1": 0.5394248202444923,
46
+ "f1_weighted": 0.5393717212794602
47
+ },
48
+ {
49
+ "accuracy": 0.57958984375,
50
+ "f1": 0.5738926109676972,
51
+ "f1_weighted": 0.5738345812708222
52
+ },
53
+ {
54
+ "accuracy": 0.55224609375,
55
+ "f1": 0.53701740535335,
56
+ "f1_weighted": 0.5369725431566787
57
+ },
58
+ {
59
+ "accuracy": 0.57568359375,
60
+ "f1": 0.5660900966393232,
61
+ "f1_weighted": 0.5660560821929206
62
+ },
63
+ {
64
+ "accuracy": 0.56982421875,
65
+ "f1": 0.5600801793467568,
66
+ "f1_weighted": 0.5600130821769826
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "CzechProductReviewSentimentClassification"
73
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/DBPedia-PLHardNegatives.json ADDED
@@ -0,0 +1,158 @@
1
+ {
2
+ "dataset_revision": "bebc2b5c8f73cd6ba9d2a4664d5f3769e6ad557a",
3
+ "evaluation_time": 83.05904912948608,
4
+ "kg_co2_emissions": 0.006827361432087784,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "pol-Latn"
12
+ ],
13
+ "main_score": 0.35835,
14
+ "map_at_1": 0.07949,
15
+ "map_at_10": 0.16237,
16
+ "map_at_100": 0.24253,
17
+ "map_at_1000": 0.27421,
18
+ "map_at_20": 0.18647,
19
+ "map_at_3": 0.11928,
20
+ "map_at_5": 0.13951,
21
+ "mrr_at_1": 0.6125,
22
+ "mrr_at_10": 0.6994315476190477,
23
+ "mrr_at_100": 0.7032588295381476,
24
+ "mrr_at_1000": 0.7033433832941113,
25
+ "mrr_at_20": 0.7024395324317924,
26
+ "mrr_at_3": 0.6812500000000001,
27
+ "mrr_at_5": 0.6937500000000001,
28
+ "nauc_map_at_1000_diff1": 0.2127520334613193,
29
+ "nauc_map_at_1000_max": 0.3212793703522635,
30
+ "nauc_map_at_1000_std": 0.18706575715538531,
31
+ "nauc_map_at_100_diff1": 0.2500522853942211,
32
+ "nauc_map_at_100_max": 0.3324120578317355,
33
+ "nauc_map_at_100_std": 0.15094704635609593,
34
+ "nauc_map_at_10_diff1": 0.26230927825322226,
35
+ "nauc_map_at_10_max": 0.21197905077618162,
36
+ "nauc_map_at_10_std": -0.026376522452354844,
37
+ "nauc_map_at_1_diff1": 0.3318063885681161,
38
+ "nauc_map_at_1_max": 0.0848391095311529,
39
+ "nauc_map_at_1_std": -0.1786829026350287,
40
+ "nauc_map_at_20_diff1": 0.2553628148509733,
41
+ "nauc_map_at_20_max": 0.25110849646012495,
42
+ "nauc_map_at_20_std": 0.03275245134662781,
43
+ "nauc_map_at_3_diff1": 0.2844597425248908,
44
+ "nauc_map_at_3_max": 0.1399701411215564,
45
+ "nauc_map_at_3_std": -0.12481315479656024,
46
+ "nauc_map_at_5_diff1": 0.26803025646171147,
47
+ "nauc_map_at_5_max": 0.17886572589943947,
48
+ "nauc_map_at_5_std": -0.07531710106663772,
49
+ "nauc_mrr_at_1000_diff1": 0.428252118800786,
50
+ "nauc_mrr_at_1000_max": 0.49503842062850734,
51
+ "nauc_mrr_at_1000_std": 0.294253365680607,
52
+ "nauc_mrr_at_100_diff1": 0.42825241042594747,
53
+ "nauc_mrr_at_100_max": 0.49515170051470064,
54
+ "nauc_mrr_at_100_std": 0.29443447919406385,
55
+ "nauc_mrr_at_10_diff1": 0.4285255031202195,
56
+ "nauc_mrr_at_10_max": 0.497109418506398,
57
+ "nauc_mrr_at_10_std": 0.2968726735722883,
58
+ "nauc_mrr_at_1_diff1": 0.45633856251304133,
59
+ "nauc_mrr_at_1_max": 0.4430449380655195,
60
+ "nauc_mrr_at_1_std": 0.21089591592844825,
61
+ "nauc_mrr_at_20_diff1": 0.42781325993497143,
62
+ "nauc_mrr_at_20_max": 0.49610698480831883,
63
+ "nauc_mrr_at_20_std": 0.29567698038292023,
64
+ "nauc_mrr_at_3_diff1": 0.4256646542844682,
65
+ "nauc_mrr_at_3_max": 0.48581912019676626,
66
+ "nauc_mrr_at_3_std": 0.28319239326860024,
67
+ "nauc_mrr_at_5_diff1": 0.42608677451640725,
68
+ "nauc_mrr_at_5_max": 0.48984203095447415,
69
+ "nauc_mrr_at_5_std": 0.28898159247715727,
70
+ "nauc_ndcg_at_1000_diff1": 0.2278918682019971,
71
+ "nauc_ndcg_at_1000_max": 0.4481663789424775,
72
+ "nauc_ndcg_at_1000_std": 0.3837420793823154,
73
+ "nauc_ndcg_at_100_diff1": 0.25967220085645065,
74
+ "nauc_ndcg_at_100_max": 0.39984853839094003,
75
+ "nauc_ndcg_at_100_std": 0.2534768392983764,
76
+ "nauc_ndcg_at_10_diff1": 0.273965968336594,
77
+ "nauc_ndcg_at_10_max": 0.3659713651564094,
78
+ "nauc_ndcg_at_10_std": 0.20424589236122365,
79
+ "nauc_ndcg_at_1_diff1": 0.36292794713467785,
80
+ "nauc_ndcg_at_1_max": 0.3660986732783413,
81
+ "nauc_ndcg_at_1_std": 0.15855911816262802,
82
+ "nauc_ndcg_at_20_diff1": 0.2616999641504782,
83
+ "nauc_ndcg_at_20_max": 0.34488492072576343,
84
+ "nauc_ndcg_at_20_std": 0.18289458188287355,
85
+ "nauc_ndcg_at_3_diff1": 0.28139541939063034,
86
+ "nauc_ndcg_at_3_max": 0.386854724478452,
87
+ "nauc_ndcg_at_3_std": 0.207530389679039,
88
+ "nauc_ndcg_at_5_diff1": 0.26600800359172283,
89
+ "nauc_ndcg_at_5_max": 0.3893203061538645,
90
+ "nauc_ndcg_at_5_std": 0.2187116148535221,
91
+ "nauc_precision_at_1000_diff1": -0.17826067952451596,
92
+ "nauc_precision_at_1000_max": -0.13825781290041472,
93
+ "nauc_precision_at_1000_std": -0.0060784116358309805,
94
+ "nauc_precision_at_100_diff1": -0.06452022431969527,
95
+ "nauc_precision_at_100_max": 0.16303421194427378,
96
+ "nauc_precision_at_100_std": 0.28922976950113194,
97
+ "nauc_precision_at_10_diff1": 0.05567406939009735,
98
+ "nauc_precision_at_10_max": 0.34457093156610624,
99
+ "nauc_precision_at_10_std": 0.403719301611098,
100
+ "nauc_precision_at_1_diff1": 0.45633856251304133,
101
+ "nauc_precision_at_1_max": 0.4430449380655195,
102
+ "nauc_precision_at_1_std": 0.21089591592844825,
103
+ "nauc_precision_at_20_diff1": 0.005386235627559235,
104
+ "nauc_precision_at_20_max": 0.30053448596882265,
105
+ "nauc_precision_at_20_std": 0.3977440996332616,
106
+ "nauc_precision_at_3_diff1": 0.16756359716023034,
107
+ "nauc_precision_at_3_max": 0.40970225066862553,
108
+ "nauc_precision_at_3_std": 0.3147929852828368,
109
+ "nauc_precision_at_5_diff1": 0.11241880468634106,
110
+ "nauc_precision_at_5_max": 0.3991596223243856,
111
+ "nauc_precision_at_5_std": 0.36940234317069537,
112
+ "nauc_recall_at_1000_diff1": 0.1334803147274439,
113
+ "nauc_recall_at_1000_max": 0.4757865473147358,
114
+ "nauc_recall_at_1000_std": 0.5911081647562926,
115
+ "nauc_recall_at_100_diff1": 0.2017524807846271,
116
+ "nauc_recall_at_100_max": 0.3533002677365799,
117
+ "nauc_recall_at_100_std": 0.24576529787647505,
118
+ "nauc_recall_at_10_diff1": 0.22132057222916793,
119
+ "nauc_recall_at_10_max": 0.19258674720213076,
120
+ "nauc_recall_at_10_std": -0.020131203920722478,
121
+ "nauc_recall_at_1_diff1": 0.3318063885681161,
122
+ "nauc_recall_at_1_max": 0.0848391095311529,
123
+ "nauc_recall_at_1_std": -0.1786829026350287,
124
+ "nauc_recall_at_20_diff1": 0.19430233496782667,
125
+ "nauc_recall_at_20_max": 0.23187704362448297,
126
+ "nauc_recall_at_20_std": 0.05279735877325079,
127
+ "nauc_recall_at_3_diff1": 0.24699163521741166,
128
+ "nauc_recall_at_3_max": 0.12238006862331353,
129
+ "nauc_recall_at_3_std": -0.11580494466632163,
130
+ "nauc_recall_at_5_diff1": 0.22494891447184032,
131
+ "nauc_recall_at_5_max": 0.15214801417149623,
132
+ "nauc_recall_at_5_std": -0.07397529818731528,
133
+ "ndcg_at_1": 0.49125,
134
+ "ndcg_at_10": 0.35835,
135
+ "ndcg_at_100": 0.43872,
136
+ "ndcg_at_1000": 0.53663,
137
+ "ndcg_at_20": 0.35819,
138
+ "ndcg_at_3": 0.39915,
139
+ "ndcg_at_5": 0.3767,
140
+ "precision_at_1": 0.6125,
141
+ "precision_at_10": 0.29175,
142
+ "precision_at_100": 0.1217,
143
+ "precision_at_1000": 0.02896,
144
+ "precision_at_20": 0.22737,
145
+ "precision_at_3": 0.43667,
146
+ "precision_at_5": 0.369,
147
+ "recall_at_1": 0.07949,
148
+ "recall_at_10": 0.21837,
149
+ "recall_at_100": 0.53612,
150
+ "recall_at_1000": 0.83405,
151
+ "recall_at_20": 0.28551,
152
+ "recall_at_3": 0.13263,
153
+ "recall_at_5": 0.16843
154
+ }
155
+ ]
156
+ },
157
+ "task_name": "DBPedia-PLHardNegatives"
158
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/DBPediaHardNegatives.json ADDED
@@ -0,0 +1,158 @@
1
+ {
2
+ "dataset_revision": "943ec7fdfef3728b2ad1966c5b6479ff9ffd26c9",
3
+ "evaluation_time": 74.08827352523804,
4
+ "kg_co2_emissions": 0.006010634823756838,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "eng-Latn"
12
+ ],
13
+ "main_score": 0.4249,
14
+ "map_at_1": 0.09166,
15
+ "map_at_10": 0.19895,
16
+ "map_at_100": 0.30669,
17
+ "map_at_1000": 0.34317,
18
+ "map_at_20": 0.23138,
19
+ "map_at_3": 0.14614,
20
+ "map_at_5": 0.16877,
21
+ "mrr_at_1": 0.6725,
22
+ "mrr_at_10": 0.7648293650793652,
23
+ "mrr_at_100": 0.7679321639512858,
24
+ "mrr_at_1000": 0.7680543066265476,
25
+ "mrr_at_20": 0.7670718606299102,
26
+ "mrr_at_3": 0.7541666666666668,
27
+ "mrr_at_5": 0.7605416666666668,
28
+ "nauc_map_at_1000_diff1": 0.24196560766959146,
29
+ "nauc_map_at_1000_max": 0.3500620094304942,
30
+ "nauc_map_at_1000_std": 0.09477374363100453,
31
+ "nauc_map_at_100_diff1": 0.2782180849101189,
32
+ "nauc_map_at_100_max": 0.3922142602715834,
33
+ "nauc_map_at_100_std": 0.07735276265663345,
34
+ "nauc_map_at_10_diff1": 0.29441086302301134,
35
+ "nauc_map_at_10_max": 0.2926684006230862,
36
+ "nauc_map_at_10_std": -0.09838708471507687,
37
+ "nauc_map_at_1_diff1": 0.37763669319245063,
38
+ "nauc_map_at_1_max": 0.21706354111805534,
39
+ "nauc_map_at_1_std": -0.17604363708948723,
40
+ "nauc_map_at_20_diff1": 0.2945289782184002,
41
+ "nauc_map_at_20_max": 0.3418917869830716,
42
+ "nauc_map_at_20_std": -0.04184684751242592,
43
+ "nauc_map_at_3_diff1": 0.2976908916627192,
44
+ "nauc_map_at_3_max": 0.22456048774619997,
45
+ "nauc_map_at_3_std": -0.1834891940127642,
46
+ "nauc_map_at_5_diff1": 0.31090567825546683,
47
+ "nauc_map_at_5_max": 0.26243748745102946,
48
+ "nauc_map_at_5_std": -0.15583581014553555,
49
+ "nauc_mrr_at_1000_diff1": 0.4924733633760865,
50
+ "nauc_mrr_at_1000_max": 0.6650451352111225,
51
+ "nauc_mrr_at_1000_std": 0.2518130641941758,
52
+ "nauc_mrr_at_100_diff1": 0.4923291599278689,
53
+ "nauc_mrr_at_100_max": 0.6650636423727867,
54
+ "nauc_mrr_at_100_std": 0.2520676099373147,
55
+ "nauc_mrr_at_10_diff1": 0.4929532466580933,
56
+ "nauc_mrr_at_10_max": 0.6657274586587653,
57
+ "nauc_mrr_at_10_std": 0.254284911345197,
58
+ "nauc_mrr_at_1_diff1": 0.508241279131593,
59
+ "nauc_mrr_at_1_max": 0.6650701625286584,
60
+ "nauc_mrr_at_1_std": 0.2181014645018867,
61
+ "nauc_mrr_at_20_diff1": 0.49284700361206785,
62
+ "nauc_mrr_at_20_max": 0.6654570887065819,
63
+ "nauc_mrr_at_20_std": 0.251406122441712,
64
+ "nauc_mrr_at_3_diff1": 0.485801366548944,
65
+ "nauc_mrr_at_3_max": 0.6605432732962225,
66
+ "nauc_mrr_at_3_std": 0.24841345566855516,
67
+ "nauc_mrr_at_5_diff1": 0.48862884003622675,
68
+ "nauc_mrr_at_5_max": 0.6578027404707166,
69
+ "nauc_mrr_at_5_std": 0.2517779157465479,
70
+ "nauc_ndcg_at_1000_diff1": 0.29499055192376245,
71
+ "nauc_ndcg_at_1000_max": 0.4788298451496829,
72
+ "nauc_ndcg_at_1000_std": 0.2588051987403721,
73
+ "nauc_ndcg_at_100_diff1": 0.3021437298356323,
74
+ "nauc_ndcg_at_100_max": 0.4683807179987565,
75
+ "nauc_ndcg_at_100_std": 0.14501262092277376,
76
+ "nauc_ndcg_at_10_diff1": 0.30869055026314446,
77
+ "nauc_ndcg_at_10_max": 0.45503240031119985,
78
+ "nauc_ndcg_at_10_std": 0.13235756276934446,
79
+ "nauc_ndcg_at_1_diff1": 0.4586000987246831,
80
+ "nauc_ndcg_at_1_max": 0.5705531529274819,
81
+ "nauc_ndcg_at_1_std": 0.19204345359571498,
82
+ "nauc_ndcg_at_20_diff1": 0.30564227103516933,
83
+ "nauc_ndcg_at_20_max": 0.4478009645537897,
84
+ "nauc_ndcg_at_20_std": 0.08684614206271321,
85
+ "nauc_ndcg_at_3_diff1": 0.2960406272412828,
86
+ "nauc_ndcg_at_3_max": 0.4684127626907895,
87
+ "nauc_ndcg_at_3_std": 0.14389987840491153,
88
+ "nauc_ndcg_at_5_diff1": 0.3153454210052153,
89
+ "nauc_ndcg_at_5_max": 0.4658778086560726,
90
+ "nauc_ndcg_at_5_std": 0.14890885845439403,
91
+ "nauc_precision_at_1000_diff1": -0.19621339856087788,
92
+ "nauc_precision_at_1000_max": -0.2264519527364171,
93
+ "nauc_precision_at_1000_std": -0.1008956583575431,
94
+ "nauc_precision_at_100_diff1": -0.09528594804579663,
95
+ "nauc_precision_at_100_max": 0.031908251245593565,
96
+ "nauc_precision_at_100_std": 0.20707020910579915,
97
+ "nauc_precision_at_10_diff1": 0.027211327832872556,
98
+ "nauc_precision_at_10_max": 0.2824235948463794,
99
+ "nauc_precision_at_10_std": 0.30778465202880506,
100
+ "nauc_precision_at_1_diff1": 0.508241279131593,
101
+ "nauc_precision_at_1_max": 0.6650701625286584,
102
+ "nauc_precision_at_1_std": 0.2181014645018867,
103
+ "nauc_precision_at_20_diff1": -0.009408017724849203,
104
+ "nauc_precision_at_20_max": 0.23827398124427834,
105
+ "nauc_precision_at_20_std": 0.3015600021540615,
106
+ "nauc_precision_at_3_diff1": 0.09800487166965235,
107
+ "nauc_precision_at_3_max": 0.3688827667283664,
108
+ "nauc_precision_at_3_std": 0.19629028738694707,
109
+ "nauc_precision_at_5_diff1": 0.0853013092038257,
110
+ "nauc_precision_at_5_max": 0.33629163791920336,
111
+ "nauc_precision_at_5_std": 0.24772268742023454,
112
+ "nauc_recall_at_1000_diff1": 0.14088292690927726,
113
+ "nauc_recall_at_1000_max": 0.35933887119516084,
114
+ "nauc_recall_at_1000_std": 0.5239331170054414,
115
+ "nauc_recall_at_100_diff1": 0.201361419176772,
116
+ "nauc_recall_at_100_max": 0.34245879519063654,
117
+ "nauc_recall_at_100_std": 0.1219973511039038,
118
+ "nauc_recall_at_10_diff1": 0.2500994191714857,
119
+ "nauc_recall_at_10_max": 0.2527146578769317,
120
+ "nauc_recall_at_10_std": -0.10558974351043732,
121
+ "nauc_recall_at_1_diff1": 0.37763669319245063,
122
+ "nauc_recall_at_1_max": 0.21706354111805534,
123
+ "nauc_recall_at_1_std": -0.17604363708948723,
124
+ "nauc_recall_at_20_diff1": 0.2432935819284644,
125
+ "nauc_recall_at_20_max": 0.30541687587290894,
126
+ "nauc_recall_at_20_std": -0.05708679236319259,
127
+ "nauc_recall_at_3_diff1": 0.26413770649519225,
128
+ "nauc_recall_at_3_max": 0.1922630835030075,
129
+ "nauc_recall_at_3_std": -0.18834619450957715,
130
+ "nauc_recall_at_5_diff1": 0.287625834198222,
131
+ "nauc_recall_at_5_max": 0.22968868794770478,
132
+ "nauc_recall_at_5_std": -0.16947588926658871,
133
+ "ndcg_at_1": 0.55,
134
+ "ndcg_at_10": 0.4249,
135
+ "ndcg_at_100": 0.51908,
136
+ "ndcg_at_1000": 0.60999,
137
+ "ndcg_at_20": 0.42576,
138
+ "ndcg_at_3": 0.47634,
139
+ "ndcg_at_5": 0.44994,
140
+ "precision_at_1": 0.6725,
141
+ "precision_at_10": 0.34075,
142
+ "precision_at_100": 0.1453,
143
+ "precision_at_1000": 0.03222,
144
+ "precision_at_20": 0.27175,
145
+ "precision_at_3": 0.51667,
146
+ "precision_at_5": 0.4415,
147
+ "recall_at_1": 0.09166,
148
+ "recall_at_10": 0.24883,
149
+ "recall_at_100": 0.62673,
150
+ "recall_at_1000": 0.90549,
151
+ "recall_at_20": 0.32822,
152
+ "recall_at_3": 0.15974,
153
+ "recall_at_5": 0.19545
154
+ }
155
+ ]
156
+ },
157
+ "task_name": "DBPediaHardNegatives"
158
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/DBpediaClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "9abd46cf7fc8b4c64290f26993c540b92aa145ac",
3
+ "evaluation_time": 8.329654932022095,
4
+ "kg_co2_emissions": 0.00034011843116958976,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.87431640625,
10
+ "f1": 0.8670728764246955,
11
+ "f1_weighted": 0.867124626862193,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "eng-Latn"
15
+ ],
16
+ "main_score": 0.87431640625,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.865234375,
20
+ "f1": 0.8558541649784533,
21
+ "f1_weighted": 0.8559273334833993
22
+ },
23
+ {
24
+ "accuracy": 0.86962890625,
25
+ "f1": 0.8619886174318978,
26
+ "f1_weighted": 0.8620533446342137
27
+ },
28
+ {
29
+ "accuracy": 0.87841796875,
30
+ "f1": 0.8722295287093862,
31
+ "f1_weighted": 0.8723173913641139
32
+ },
33
+ {
34
+ "accuracy": 0.87451171875,
35
+ "f1": 0.8669617816207321,
36
+ "f1_weighted": 0.8669780769379867
37
+ },
38
+ {
39
+ "accuracy": 0.87890625,
40
+ "f1": 0.8716509642805589,
41
+ "f1_weighted": 0.8717142131082336
42
+ },
43
+ {
44
+ "accuracy": 0.8896484375,
45
+ "f1": 0.884578392496892,
46
+ "f1_weighted": 0.8846015517412362
47
+ },
48
+ {
49
+ "accuracy": 0.89013671875,
50
+ "f1": 0.8856610689995267,
51
+ "f1_weighted": 0.8856989455006815
52
+ },
53
+ {
54
+ "accuracy": 0.86083984375,
55
+ "f1": 0.8537905109340246,
56
+ "f1_weighted": 0.8538311662883127
57
+ },
58
+ {
59
+ "accuracy": 0.88134765625,
60
+ "f1": 0.8740808504997579,
61
+ "f1_weighted": 0.8740721874833495
62
+ },
63
+ {
64
+ "accuracy": 0.8544921875,
65
+ "f1": 0.8439328842957267,
66
+ "f1_weighted": 0.8440520580804016
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "DBpediaClassification"
73
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/DalajClassification.json ADDED
@@ -0,0 +1,95 @@
1
+ {
2
+ "dataset_revision": "7ebf0b4caa7b2ae39698a889de782c09e6f5ee56",
3
+ "evaluation_time": 7.014214992523193,
4
+ "kg_co2_emissions": 0.00021165757706540655,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.4995495495495496,
10
+ "ap": 0.49978501201164116,
11
+ "ap_weighted": 0.49978501201164116,
12
+ "f1": 0.49760462839897873,
13
+ "f1_weighted": 0.49760462839897873,
14
+ "hf_subset": "default",
15
+ "languages": [
16
+ "swe-Latn"
17
+ ],
18
+ "main_score": 0.4995495495495496,
19
+ "scores_per_experiment": [
20
+ {
21
+ "accuracy": 0.4988738738738739,
22
+ "ap": 0.49943804751497056,
23
+ "ap_weighted": 0.49943804751497056,
24
+ "f1": 0.4963387821431986,
25
+ "f1_weighted": 0.4963387821431986
26
+ },
27
+ {
28
+ "accuracy": 0.5022522522522522,
29
+ "ap": 0.501131043707463,
30
+ "ap_weighted": 0.501131043707463,
31
+ "f1": 0.5021285016261854,
32
+ "f1_weighted": 0.5021285016261854
33
+ },
34
+ {
35
+ "accuracy": 0.5,
36
+ "ap": 0.5,
37
+ "ap_weighted": 0.5,
38
+ "f1": 0.49974623926592093,
39
+ "f1_weighted": 0.49974623926592093
40
+ },
41
+ {
42
+ "accuracy": 0.5,
43
+ "ap": 0.5,
44
+ "ap_weighted": 0.5,
45
+ "f1": 0.49908270867742344,
46
+ "f1_weighted": 0.4990827086774235
47
+ },
48
+ {
49
+ "accuracy": 0.5067567567567568,
50
+ "ap": 0.5034199158174568,
51
+ "ap_weighted": 0.5034199158174568,
52
+ "f1": 0.5055427862416859,
53
+ "f1_weighted": 0.5055427862416858
54
+ },
55
+ {
56
+ "accuracy": 0.49774774774774777,
57
+ "ap": 0.49887926203715677,
58
+ "ap_weighted": 0.49887926203715677,
59
+ "f1": 0.4973168094147751,
60
+ "f1_weighted": 0.4973168094147751
61
+ },
62
+ {
63
+ "accuracy": 0.5,
64
+ "ap": 0.5,
65
+ "ap_weighted": 0.5,
66
+ "f1": 0.4970507194611695,
67
+ "f1_weighted": 0.4970507194611695
68
+ },
69
+ {
70
+ "accuracy": 0.49436936936936937,
71
+ "ap": 0.4972194416638861,
72
+ "ap_weighted": 0.4972194416638861,
73
+ "f1": 0.493392188030083,
74
+ "f1_weighted": 0.49339218803008295
75
+ },
76
+ {
77
+ "accuracy": 0.4988738738738739,
78
+ "ap": 0.49943863803380417,
79
+ "ap_weighted": 0.49943863803380417,
80
+ "f1": 0.4906255035287293,
81
+ "f1_weighted": 0.4906255035287293
82
+ },
83
+ {
84
+ "accuracy": 0.4966216216216216,
85
+ "ap": 0.49832377134167416,
86
+ "ap_weighted": 0.49832377134167416,
87
+ "f1": 0.49482204560061593,
88
+ "f1_weighted": 0.494822045600616
89
+ }
90
+ ]
91
+ }
92
+ ]
93
+ },
94
+ "task_name": "DalajClassification"
95
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/DiaBlaBitextMining.json ADDED
@@ -0,0 +1,35 @@
1
+ {
2
+ "dataset_revision": "5345895c56a601afe1a98519ce3199be60a27dba",
3
+ "evaluation_time": 14.103885173797607,
4
+ "kg_co2_emissions": 0.0005857981666713344,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.8677800974251914,
10
+ "f1": 0.8447879644904698,
11
+ "hf_subset": "fr-en",
12
+ "languages": [
13
+ "fra-Latn",
14
+ "eng-Latn"
15
+ ],
16
+ "main_score": 0.8447879644904698,
17
+ "precision": 0.8356241873539005,
18
+ "recall": 0.8677800974251914
19
+ },
20
+ {
21
+ "accuracy": 0.8677800974251914,
22
+ "f1": 0.8447879644904698,
23
+ "hf_subset": "en-fr",
24
+ "languages": [
25
+ "eng-Latn",
26
+ "fra-Latn"
27
+ ],
28
+ "main_score": 0.8447879644904698,
29
+ "precision": 0.8356241873539005,
30
+ "recall": 0.8677800974251914
31
+ }
32
+ ]
33
+ },
34
+ "task_name": "DiaBlaBitextMining"
35
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/EstonianValenceClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "9157397f05a127b3ac93b93dd88abf1bdf710c22",
3
+ "evaluation_time": 8.083175420761108,
4
+ "kg_co2_emissions": 0.00027569421814583326,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.3907090464547677,
10
+ "f1": 0.3612354978603326,
11
+ "f1_weighted": 0.4106210931311315,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "est-Latn"
15
+ ],
16
+ "main_score": 0.3907090464547677,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.4119804400977995,
20
+ "f1": 0.37796786787984404,
21
+ "f1_weighted": 0.43117658573763573
22
+ },
23
+ {
24
+ "accuracy": 0.3899755501222494,
25
+ "f1": 0.35089987724920546,
26
+ "f1_weighted": 0.41686800277767677
27
+ },
28
+ {
29
+ "accuracy": 0.37163814180929094,
30
+ "f1": 0.34052464917553776,
31
+ "f1_weighted": 0.39046807191413674
32
+ },
33
+ {
34
+ "accuracy": 0.3899755501222494,
35
+ "f1": 0.3512335552700845,
36
+ "f1_weighted": 0.41061027277697615
37
+ },
38
+ {
39
+ "accuracy": 0.3765281173594132,
40
+ "f1": 0.3678961803256607,
41
+ "f1_weighted": 0.39719664095303675
42
+ },
43
+ {
44
+ "accuracy": 0.3508557457212714,
45
+ "f1": 0.34680109493377226,
46
+ "f1_weighted": 0.3658045128398561
47
+ },
48
+ {
49
+ "accuracy": 0.4193154034229829,
50
+ "f1": 0.38759093922726734,
51
+ "f1_weighted": 0.4428166618828878
52
+ },
53
+ {
54
+ "accuracy": 0.3960880195599022,
55
+ "f1": 0.3550184867728854,
56
+ "f1_weighted": 0.4081623863955955
57
+ },
58
+ {
59
+ "accuracy": 0.3960880195599022,
60
+ "f1": 0.3699146248115177,
61
+ "f1_weighted": 0.41871712115236975
62
+ },
63
+ {
64
+ "accuracy": 0.40464547677261614,
65
+ "f1": 0.3645077029575511,
66
+ "f1_weighted": 0.42439067488114346
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "EstonianValenceClassification"
73
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/FEVERHardNegatives.json ADDED
@@ -0,0 +1,158 @@
1
+ {
2
+ "dataset_revision": "080c9ed6267b65029207906e815d44a9240bafca",
3
+ "evaluation_time": 245.4683961868286,
4
+ "kg_co2_emissions": 0.022226310188709485,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "eng-Latn"
12
+ ],
13
+ "main_score": 0.92883,
14
+ "map_at_1": 0.85655,
15
+ "map_at_10": 0.90446,
16
+ "map_at_100": 0.90673,
17
+ "map_at_1000": 0.90682,
18
+ "map_at_20": 0.90592,
19
+ "map_at_3": 0.8968,
20
+ "map_at_5": 0.90198,
21
+ "mrr_at_1": 0.91,
22
+ "mrr_at_10": 0.9477111111111113,
23
+ "mrr_at_100": 0.9477944444444447,
24
+ "mrr_at_1000": 0.9477944444444447,
25
+ "mrr_at_20": 0.9477944444444447,
26
+ "mrr_at_3": 0.944666666666667,
27
+ "mrr_at_5": 0.9472666666666671,
28
+ "nauc_map_at_1000_diff1": 0.48447862160449984,
29
+ "nauc_map_at_1000_max": 0.3138767467112793,
30
+ "nauc_map_at_1000_std": -0.05936365696107634,
31
+ "nauc_map_at_100_diff1": 0.4843075339671329,
32
+ "nauc_map_at_100_max": 0.3139711006865051,
33
+ "nauc_map_at_100_std": -0.05941029942906058,
34
+ "nauc_map_at_10_diff1": 0.477100679400768,
35
+ "nauc_map_at_10_max": 0.30570151142488644,
36
+ "nauc_map_at_10_std": -0.061570295530436615,
37
+ "nauc_map_at_1_diff1": 0.5353645042469993,
38
+ "nauc_map_at_1_max": 0.33479096687815324,
39
+ "nauc_map_at_1_std": -0.0916379365751488,
40
+ "nauc_map_at_20_diff1": 0.48275569849532085,
41
+ "nauc_map_at_20_max": 0.3111815478358615,
42
+ "nauc_map_at_20_std": -0.05981482934457556,
43
+ "nauc_map_at_3_diff1": 0.4655103804553752,
44
+ "nauc_map_at_3_max": 0.27950077283093566,
45
+ "nauc_map_at_3_std": -0.06440926389934667,
46
+ "nauc_map_at_5_diff1": 0.4734453518596214,
47
+ "nauc_map_at_5_max": 0.30272050578031673,
48
+ "nauc_map_at_5_std": -0.0690384918766065,
49
+ "nauc_mrr_at_1000_diff1": 0.8037382453247334,
50
+ "nauc_mrr_at_1000_max": 0.48943953446016053,
51
+ "nauc_mrr_at_1000_std": -0.2609363279915229,
52
+ "nauc_mrr_at_100_diff1": 0.8037382453247334,
53
+ "nauc_mrr_at_100_max": 0.48943953446016053,
54
+ "nauc_mrr_at_100_std": -0.2609363279915229,
55
+ "nauc_mrr_at_10_diff1": 0.8038427015515098,
56
+ "nauc_mrr_at_10_max": 0.48923017995979057,
57
+ "nauc_mrr_at_10_std": -0.26141851414031914,
58
+ "nauc_mrr_at_1_diff1": 0.8026143790849659,
59
+ "nauc_mrr_at_1_max": 0.4916225749559069,
60
+ "nauc_mrr_at_1_std": -0.23261230418093226,
61
+ "nauc_mrr_at_20_diff1": 0.8037382453247334,
62
+ "nauc_mrr_at_20_max": 0.48943953446016053,
63
+ "nauc_mrr_at_20_std": -0.2609363279915229,
64
+ "nauc_mrr_at_3_diff1": 0.8014129346517735,
65
+ "nauc_mrr_at_3_max": 0.47256251898349855,
66
+ "nauc_mrr_at_3_std": -0.2680751015265545,
67
+ "nauc_mrr_at_5_diff1": 0.805082800081685,
68
+ "nauc_mrr_at_5_max": 0.4935350246293212,
69
+ "nauc_mrr_at_5_std": -0.2639806660127199,
70
+ "nauc_ndcg_at_1000_diff1": 0.5200445059935517,
71
+ "nauc_ndcg_at_1000_max": 0.34230067359557953,
72
+ "nauc_ndcg_at_1000_std": -0.06174251958447874,
73
+ "nauc_ndcg_at_100_diff1": 0.5133174412857602,
74
+ "nauc_ndcg_at_100_max": 0.3482945666574494,
75
+ "nauc_ndcg_at_100_std": -0.05250362988489303,
76
+ "nauc_ndcg_at_10_diff1": 0.48426000442964784,
77
+ "nauc_ndcg_at_10_max": 0.3157518108523079,
78
+ "nauc_ndcg_at_10_std": -0.05443006695578037,
79
+ "nauc_ndcg_at_1_diff1": 0.8026143790849659,
80
+ "nauc_ndcg_at_1_max": 0.4916225749559069,
81
+ "nauc_ndcg_at_1_std": -0.23261230418093226,
82
+ "nauc_ndcg_at_20_diff1": 0.500492261156621,
83
+ "nauc_ndcg_at_20_max": 0.3313734585090867,
84
+ "nauc_ndcg_at_20_std": -0.05119586066582514,
85
+ "nauc_ndcg_at_3_diff1": 0.4826081629015269,
86
+ "nauc_ndcg_at_3_max": 0.2807453679457889,
87
+ "nauc_ndcg_at_3_std": -0.07912514435800086,
88
+ "nauc_ndcg_at_5_diff1": 0.4805536408548324,
89
+ "nauc_ndcg_at_5_max": 0.3138353582801571,
90
+ "nauc_ndcg_at_5_std": -0.0708887421988285,
91
+ "nauc_precision_at_1000_diff1": -0.06292679608641918,
92
+ "nauc_precision_at_1000_max": -0.022003499397241336,
93
+ "nauc_precision_at_1000_std": 0.024420199760407386,
94
+ "nauc_precision_at_100_diff1": -0.054787950165142485,
95
+ "nauc_precision_at_100_max": 0.040158992855293535,
96
+ "nauc_precision_at_100_std": 0.0385253745093807,
97
+ "nauc_precision_at_10_diff1": -0.05961336286528217,
98
+ "nauc_precision_at_10_max": 0.023731027936265376,
99
+ "nauc_precision_at_10_std": 0.012090065630324639,
100
+ "nauc_precision_at_1_diff1": 0.8026143790849659,
101
+ "nauc_precision_at_1_max": 0.4916225749559069,
102
+ "nauc_precision_at_1_std": -0.23261230418093226,
103
+ "nauc_precision_at_20_diff1": -0.05211422852957672,
104
+ "nauc_precision_at_20_max": 0.028803689656921164,
105
+ "nauc_precision_at_20_std": 0.03589221096599635,
106
+ "nauc_precision_at_3_diff1": 0.11891627489877438,
107
+ "nauc_precision_at_3_max": -0.005389639211211579,
108
+ "nauc_precision_at_3_std": -0.12895277951393938,
109
+ "nauc_precision_at_5_diff1": 5.0355002764823806e-05,
110
+ "nauc_precision_at_5_max": 0.08380511175213497,
111
+ "nauc_precision_at_5_std": -0.08786947983282639,
112
+ "nauc_recall_at_1000_diff1": -0.13520318761998523,
113
+ "nauc_recall_at_1000_max": -0.033372508967826384,
114
+ "nauc_recall_at_1000_std": 0.734838528798478,
115
+ "nauc_recall_at_100_diff1": 0.02897092323755865,
116
+ "nauc_recall_at_100_max": 0.34168366758317387,
117
+ "nauc_recall_at_100_std": 0.6186203926441324,
118
+ "nauc_recall_at_10_diff1": 0.12379457195345477,
119
+ "nauc_recall_at_10_max": 0.13243536807531447,
120
+ "nauc_recall_at_10_std": 0.21836845973925426,
121
+ "nauc_recall_at_1_diff1": 0.5353645042469993,
122
+ "nauc_recall_at_1_max": 0.33479096687815324,
123
+ "nauc_recall_at_1_std": -0.0916379365751488,
124
+ "nauc_recall_at_20_diff1": 0.12233730427668768,
125
+ "nauc_recall_at_20_max": 0.18938944400118288,
126
+ "nauc_recall_at_20_std": 0.3440481510351426,
127
+ "nauc_recall_at_3_diff1": 0.23811561440223675,
128
+ "nauc_recall_at_3_max": 0.09345676515608332,
129
+ "nauc_recall_at_3_std": 0.03836551401334302,
130
+ "nauc_recall_at_5_diff1": 0.17153884378923456,
131
+ "nauc_recall_at_5_max": 0.16357496770720623,
132
+ "nauc_recall_at_5_std": 0.09583603374178096,
133
+ "ndcg_at_1": 0.91,
134
+ "ndcg_at_10": 0.92883,
135
+ "ndcg_at_100": 0.93521,
136
+ "ndcg_at_1000": 0.93662,
137
+ "ndcg_at_20": 0.93197,
138
+ "ndcg_at_3": 0.91949,
139
+ "ndcg_at_5": 0.9253,
140
+ "precision_at_1": 0.91,
141
+ "precision_at_10": 0.1051,
142
+ "precision_at_100": 0.01113,
143
+ "precision_at_1000": 0.00114,
144
+ "precision_at_20": 0.0537,
145
+ "precision_at_3": 0.33533,
146
+ "precision_at_5": 0.2062,
147
+ "recall_at_1": 0.85655,
148
+ "recall_at_10": 0.96173,
149
+ "recall_at_100": 0.98449,
150
+ "recall_at_1000": 0.9929,
151
+ "recall_at_20": 0.97169,
152
+ "recall_at_3": 0.9357,
153
+ "recall_at_5": 0.95191
154
+ }
155
+ ]
156
+ },
157
+ "task_name": "FEVERHardNegatives"
158
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/FaroeseSTS.json ADDED
@@ -0,0 +1,26 @@
1
+ {
2
+ "dataset_revision": "8cb36efa69428b3dc290e1125995a999963163c5",
3
+ "evaluation_time": 0.8039565086364746,
4
+ "kg_co2_emissions": 3.436595831303645e-05,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "train": [
8
+ {
9
+ "cosine_pearson": 0.7123591930484869,
10
+ "cosine_spearman": 0.7171043504672766,
11
+ "euclidean_pearson": 0.7172974543194084,
12
+ "euclidean_spearman": 0.7171043504672766,
13
+ "hf_subset": "default",
14
+ "languages": [
15
+ "fao-Latn"
16
+ ],
17
+ "main_score": 0.7171043504672766,
18
+ "manhattan_pearson": 0.7177633588668217,
19
+ "manhattan_spearman": 0.7175938929782879,
20
+ "pearson": 0.7123591930484869,
21
+ "spearman": 0.7171043504672766
22
+ }
23
+ ]
24
+ },
25
+ "task_name": "FaroeseSTS"
26
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/FilipinoShopeeReviewsClassification.json ADDED
@@ -0,0 +1,137 @@
1
+ {
2
+ "dataset_revision": "d096f402fdc76886458c0cfb5dedc829bea2b935",
3
+ "evaluation_time": 15.85331916809082,
4
+ "kg_co2_emissions": 0.0005454851543177007,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.3189453125,
10
+ "f1": 0.3108226092483305,
11
+ "f1_weighted": 0.3108215277247769,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "fil-Latn"
15
+ ],
16
+ "main_score": 0.3189453125,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.32421875,
20
+ "f1": 0.3210939157006938,
21
+ "f1_weighted": 0.3211276166390551
22
+ },
23
+ {
24
+ "accuracy": 0.31982421875,
25
+ "f1": 0.3121743222436116,
26
+ "f1_weighted": 0.3121928877891662
27
+ },
28
+ {
29
+ "accuracy": 0.3193359375,
30
+ "f1": 0.3107398351421936,
31
+ "f1_weighted": 0.31072461191168105
32
+ },
33
+ {
34
+ "accuracy": 0.32666015625,
35
+ "f1": 0.3204146672343485,
36
+ "f1_weighted": 0.320408514335056
37
+ },
38
+ {
39
+ "accuracy": 0.314453125,
40
+ "f1": 0.3043510828808571,
41
+ "f1_weighted": 0.30431925164936124
42
+ },
43
+ {
44
+ "accuracy": 0.298828125,
45
+ "f1": 0.28319193555321237,
46
+ "f1_weighted": 0.2831778294015872
47
+ },
48
+ {
49
+ "accuracy": 0.30322265625,
50
+ "f1": 0.300233169654266,
51
+ "f1_weighted": 0.3002544540088299
52
+ },
53
+ {
54
+ "accuracy": 0.35791015625,
55
+ "f1": 0.3466134374349078,
56
+ "f1_weighted": 0.34659366471337616
57
+ },
58
+ {
59
+ "accuracy": 0.3310546875,
60
+ "f1": 0.31867565928405267,
61
+ "f1_weighted": 0.31865240183305654
62
+ },
63
+ {
64
+ "accuracy": 0.2939453125,
65
+ "f1": 0.29073806735516106,
66
+ "f1_weighted": 0.29076404496659936
67
+ }
68
+ ]
69
+ }
70
+ ],
71
+ "validation": [
72
+ {
73
+ "accuracy": 0.320947265625,
74
+ "f1": 0.31280693572155377,
75
+ "f1_weighted": 0.3127987552965525,
76
+ "hf_subset": "default",
77
+ "languages": [
78
+ "fil-Latn"
79
+ ],
80
+ "main_score": 0.320947265625,
81
+ "scores_per_experiment": [
82
+ {
83
+ "accuracy": 0.3408203125,
84
+ "f1": 0.33826365140067977,
85
+ "f1_weighted": 0.3382627676235014
86
+ },
87
+ {
88
+ "accuracy": 0.31103515625,
89
+ "f1": 0.3034248893926656,
90
+ "f1_weighted": 0.30344558907670505
91
+ },
92
+ {
93
+ "accuracy": 0.3251953125,
94
+ "f1": 0.3192419606507152,
95
+ "f1_weighted": 0.3192184429966553
96
+ },
97
+ {
98
+ "accuracy": 0.35400390625,
99
+ "f1": 0.34590311032642135,
100
+ "f1_weighted": 0.3459031044366191
101
+ },
102
+ {
103
+ "accuracy": 0.32421875,
104
+ "f1": 0.3139044833461365,
105
+ "f1_weighted": 0.31386904636730517
106
+ },
107
+ {
108
+ "accuracy": 0.30517578125,
109
+ "f1": 0.29033670653437194,
110
+ "f1_weighted": 0.2903266434784454
111
+ },
112
+ {
113
+ "accuracy": 0.26171875,
114
+ "f1": 0.2578140635074745,
115
+ "f1_weighted": 0.2578295145527752
116
+ },
117
+ {
118
+ "accuracy": 0.3662109375,
119
+ "f1": 0.35649662666834814,
120
+ "f1_weighted": 0.3564700881168724
121
+ },
122
+ {
123
+ "accuracy": 0.3291015625,
124
+ "f1": 0.3135430556159218,
125
+ "f1_weighted": 0.3135077978002893
126
+ },
127
+ {
128
+ "accuracy": 0.2919921875,
129
+ "f1": 0.28914080977280265,
130
+ "f1_weighted": 0.2891545585163566
131
+ }
132
+ ]
133
+ }
134
+ ]
135
+ },
136
+ "task_name": "FilipinoShopeeReviewsClassification"
137
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/FinParaSTS.json ADDED
@@ -0,0 +1,43 @@
1
+ {
2
+ "dataset_revision": "e4428e399de70a21b8857464e76f0fe859cabe05",
3
+ "evaluation_time": 1.9246649742126465,
4
+ "kg_co2_emissions": 9.187305690969147e-05,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "cosine_pearson": 0.21184538842335984,
10
+ "cosine_spearman": 0.19034677628062427,
11
+ "euclidean_pearson": 0.2052840989831833,
12
+ "euclidean_spearman": 0.19034677628062427,
13
+ "hf_subset": "default",
14
+ "languages": [
15
+ "fin-Latn"
16
+ ],
17
+ "main_score": 0.19034677628062427,
18
+ "manhattan_pearson": 0.20273263360611785,
19
+ "manhattan_spearman": 0.18803757193263637,
20
+ "pearson": 0.21184538842335984,
21
+ "spearman": 0.19034677628062427
22
+ }
23
+ ],
24
+ "validation": [
25
+ {
26
+ "cosine_pearson": 0.2127276739118958,
27
+ "cosine_spearman": 0.20764822036005762,
28
+ "euclidean_pearson": 0.21129186753452692,
29
+ "euclidean_spearman": 0.20764822036005762,
30
+ "hf_subset": "default",
31
+ "languages": [
32
+ "fin-Latn"
33
+ ],
34
+ "main_score": 0.20764822036005762,
35
+ "manhattan_pearson": 0.21082242420529543,
36
+ "manhattan_spearman": 0.2077436393209813,
37
+ "pearson": 0.2127276739118958,
38
+ "spearman": 0.20764822036005762
39
+ }
40
+ ]
41
+ },
42
+ "task_name": "FinParaSTS"
43
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/FinancialPhrasebankClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "1484d06fe7af23030c7c977b12556108d1f67039",
3
+ "evaluation_time": 6.982546806335449,
4
+ "kg_co2_emissions": 0.0002427728404439563,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "train": [
8
+ {
9
+ "accuracy": 0.8010159010600706,
10
+ "f1": 0.7789394418797568,
11
+ "f1_weighted": 0.8028217467905169,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "eng-Latn"
15
+ ],
16
+ "main_score": 0.8010159010600706,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.8608657243816255,
20
+ "f1": 0.832077368206586,
21
+ "f1_weighted": 0.8592213986670202
22
+ },
23
+ {
24
+ "accuracy": 0.8158127208480566,
25
+ "f1": 0.785581556830525,
26
+ "f1_weighted": 0.8136221025102734
27
+ },
28
+ {
29
+ "accuracy": 0.7420494699646644,
30
+ "f1": 0.7301288970662559,
31
+ "f1_weighted": 0.7476362473701156
32
+ },
33
+ {
34
+ "accuracy": 0.821113074204947,
35
+ "f1": 0.799003451648209,
36
+ "f1_weighted": 0.8216055053932066
37
+ },
38
+ {
39
+ "accuracy": 0.8502650176678446,
40
+ "f1": 0.8236066787302073,
41
+ "f1_weighted": 0.8484507438630615
42
+ },
43
+ {
44
+ "accuracy": 0.6890459363957597,
45
+ "f1": 0.7019630208586332,
46
+ "f1_weighted": 0.6990131439357697
47
+ },
48
+ {
49
+ "accuracy": 0.7716431095406361,
50
+ "f1": 0.7399125719243159,
51
+ "f1_weighted": 0.7754563246093719
52
+ },
53
+ {
54
+ "accuracy": 0.8007950530035336,
55
+ "f1": 0.7783765045471848,
56
+ "f1_weighted": 0.8035734229646323
57
+ },
58
+ {
59
+ "accuracy": 0.838339222614841,
60
+ "f1": 0.8047732178162619,
61
+ "f1_weighted": 0.8386703899487872
62
+ },
63
+ {
64
+ "accuracy": 0.8202296819787986,
65
+ "f1": 0.793971151169389,
66
+ "f1_weighted": 0.8209681886429303
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "FinancialPhrasebankClassification"
73
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/FloresBitextMining.json ADDED
The diff for this file is too large to render. See raw diff
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/GermanSTSBenchmark.json ADDED
@@ -0,0 +1,43 @@
1
+ {
2
+ "dataset_revision": "e36907544d44c3a247898ed81540310442329e20",
3
+ "evaluation_time": 2.989161252975464,
4
+ "kg_co2_emissions": 0.0001398428051501571,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "cosine_pearson": 0.8158917303626811,
10
+ "cosine_spearman": 0.8183641679910764,
11
+ "euclidean_pearson": 0.810509965182625,
12
+ "euclidean_spearman": 0.8183621668595213,
13
+ "hf_subset": "default",
14
+ "languages": [
15
+ "deu-Latn"
16
+ ],
17
+ "main_score": 0.8183641679910764,
18
+ "manhattan_pearson": 0.8089964740720768,
19
+ "manhattan_spearman": 0.8168725672147809,
20
+ "pearson": 0.8158917303626811,
21
+ "spearman": 0.8183641679910764
22
+ }
23
+ ],
24
+ "validation": [
25
+ {
26
+ "cosine_pearson": 0.838787925673631,
27
+ "cosine_spearman": 0.8430493452514276,
28
+ "euclidean_pearson": 0.8404766496231687,
29
+ "euclidean_spearman": 0.8430491206723509,
30
+ "hf_subset": "default",
31
+ "languages": [
32
+ "deu-Latn"
33
+ ],
34
+ "main_score": 0.8430493452514276,
35
+ "manhattan_pearson": 0.8380200407784452,
36
+ "manhattan_spearman": 0.8409322436556135,
37
+ "pearson": 0.838787925673631,
38
+ "spearman": 0.8430493452514276
39
+ }
40
+ ]
41
+ },
42
+ "task_name": "GermanSTSBenchmark"
43
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/GreekLegalCodeClassification.json ADDED
@@ -0,0 +1,137 @@
1
+ {
2
+ "dataset_revision": "de0fdb34424f07d1ac6f0ede23ee0ed44bd9f5d1",
3
+ "evaluation_time": 1071.4071300029755,
4
+ "kg_co2_emissions": 0.08963246324644233,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.383203125,
10
+ "f1": 0.3391984081885905,
11
+ "f1_weighted": 0.37520991714742447,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "ell-Grek"
15
+ ],
16
+ "main_score": 0.383203125,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.37939453125,
20
+ "f1": 0.3308987324649169,
21
+ "f1_weighted": 0.3683162267912403
22
+ },
23
+ {
24
+ "accuracy": 0.39599609375,
25
+ "f1": 0.36036772292269775,
26
+ "f1_weighted": 0.38671002171577507
27
+ },
28
+ {
29
+ "accuracy": 0.37158203125,
30
+ "f1": 0.33475379664563953,
31
+ "f1_weighted": 0.3638140682263029
32
+ },
33
+ {
34
+ "accuracy": 0.38427734375,
35
+ "f1": 0.3358027401176293,
36
+ "f1_weighted": 0.371551401983715
37
+ },
38
+ {
39
+ "accuracy": 0.384765625,
40
+ "f1": 0.3502641121290901,
41
+ "f1_weighted": 0.37503665204139447
42
+ },
43
+ {
44
+ "accuracy": 0.375,
45
+ "f1": 0.32794723319262653,
46
+ "f1_weighted": 0.3660861141123535
47
+ },
48
+ {
49
+ "accuracy": 0.38037109375,
50
+ "f1": 0.33796221846120034,
51
+ "f1_weighted": 0.3827332568276366
52
+ },
53
+ {
54
+ "accuracy": 0.3876953125,
55
+ "f1": 0.33541706361535534,
56
+ "f1_weighted": 0.382618421713433
57
+ },
58
+ {
59
+ "accuracy": 0.3857421875,
60
+ "f1": 0.3422207090147696,
61
+ "f1_weighted": 0.38239049970365024
62
+ },
63
+ {
64
+ "accuracy": 0.38720703125,
65
+ "f1": 0.3363497533219791,
66
+ "f1_weighted": 0.3728425083587439
67
+ }
68
+ ]
69
+ }
70
+ ],
71
+ "validation": [
72
+ {
73
+ "accuracy": 0.38525390625,
74
+ "f1": 0.31792173453091,
75
+ "f1_weighted": 0.37546083326884594,
76
+ "hf_subset": "default",
77
+ "languages": [
78
+ "ell-Grek"
79
+ ],
80
+ "main_score": 0.38525390625,
81
+ "scores_per_experiment": [
82
+ {
83
+ "accuracy": 0.3681640625,
84
+ "f1": 0.3035575213660245,
85
+ "f1_weighted": 0.3585180456318162
86
+ },
87
+ {
88
+ "accuracy": 0.3935546875,
89
+ "f1": 0.33061903397388276,
90
+ "f1_weighted": 0.3800376600702504
91
+ },
92
+ {
93
+ "accuracy": 0.3837890625,
94
+ "f1": 0.30726866268623976,
95
+ "f1_weighted": 0.3710300363825968
96
+ },
97
+ {
98
+ "accuracy": 0.37158203125,
99
+ "f1": 0.3040606956653981,
100
+ "f1_weighted": 0.3556131136784332
101
+ },
102
+ {
103
+ "accuracy": 0.38232421875,
104
+ "f1": 0.3036491499620793,
105
+ "f1_weighted": 0.37000934068251773
106
+ },
107
+ {
108
+ "accuracy": 0.37890625,
109
+ "f1": 0.3165219312720889,
110
+ "f1_weighted": 0.3770746510287172
111
+ },
112
+ {
113
+ "accuracy": 0.3857421875,
114
+ "f1": 0.32887878507916535,
115
+ "f1_weighted": 0.3789440539946909
116
+ },
117
+ {
118
+ "accuracy": 0.4033203125,
119
+ "f1": 0.3390519222955755,
120
+ "f1_weighted": 0.39164311444332744
121
+ },
122
+ {
123
+ "accuracy": 0.3916015625,
124
+ "f1": 0.32335855497461585,
125
+ "f1_weighted": 0.38393077853094826
126
+ },
127
+ {
128
+ "accuracy": 0.3935546875,
129
+ "f1": 0.32225108803402985,
130
+ "f1_weighted": 0.3878075382451614
131
+ }
132
+ ]
133
+ }
134
+ ]
135
+ },
136
+ "task_name": "GreekLegalCodeClassification"
137
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/GujaratiNewsClassification.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "dataset_revision": "1a5f2fa2914bfeff4fcdc6fff4194fa8ec8fa19e",
3
+ "evaluation_time": 6.972753524780273,
4
+ "kg_co2_emissions": 0.00021355648530411861,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.8477238239757208,
10
+ "f1": 0.8221634647888061,
11
+ "f1_weighted": 0.8482225918141404,
12
+ "hf_subset": "default",
13
+ "languages": [
14
+ "guj-Gujr"
15
+ ],
16
+ "main_score": 0.8477238239757208,
17
+ "scores_per_experiment": [
18
+ {
19
+ "accuracy": 0.8535660091047041,
20
+ "f1": 0.8312791666427889,
21
+ "f1_weighted": 0.855672948624524
22
+ },
23
+ {
24
+ "accuracy": 0.8376327769347496,
25
+ "f1": 0.8111079198900321,
26
+ "f1_weighted": 0.8368643067046453
27
+ },
28
+ {
29
+ "accuracy": 0.8125948406676783,
30
+ "f1": 0.7798853393922381,
31
+ "f1_weighted": 0.8152862278478445
32
+ },
33
+ {
34
+ "accuracy": 0.8512898330804249,
35
+ "f1": 0.825911652937802,
36
+ "f1_weighted": 0.851659011873412
37
+ },
38
+ {
39
+ "accuracy": 0.8512898330804249,
40
+ "f1": 0.8285899757250821,
41
+ "f1_weighted": 0.8512681618890673
42
+ },
43
+ {
44
+ "accuracy": 0.8482549317147192,
45
+ "f1": 0.8250188564332511,
46
+ "f1_weighted": 0.8469514763183472
47
+ },
48
+ {
49
+ "accuracy": 0.8285280728376327,
50
+ "f1": 0.8020108834027107,
51
+ "f1_weighted": 0.8276159385029642
52
+ },
53
+ {
54
+ "accuracy": 0.8679817905918058,
55
+ "f1": 0.8434413897488585,
56
+ "f1_weighted": 0.868820300507069
57
+ },
58
+ {
59
+ "accuracy": 0.8611532625189682,
60
+ "f1": 0.8340340336015197,
61
+ "f1_weighted": 0.8612382919482378
62
+ },
63
+ {
64
+ "accuracy": 0.8649468892261002,
65
+ "f1": 0.8403554301137786,
66
+ "f1_weighted": 0.8668492539252942
67
+ }
68
+ ]
69
+ }
70
+ ]
71
+ },
72
+ "task_name": "GujaratiNewsClassification"
73
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/HALClusteringS2S.v2.json ADDED
@@ -0,0 +1,34 @@
1
+ {
2
+ "dataset_revision": "e06ebbbb123f8144bef1a5d18796f3dec9ae2915",
3
+ "evaluation_time": 39.20399856567383,
4
+ "kg_co2_emissions": 0.0011245355426239612,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "fra-Latn"
12
+ ],
13
+ "main_score": 0.2587842871849939,
14
+ "v_measure": 0.2587842871849939,
15
+ "v_measure_std": 0.013524264883457966,
16
+ "v_measures": {
17
+ "Level 0": [
18
+ 0.2678197846197006,
19
+ 0.26898506969246616,
20
+ 0.2514844298483211,
21
+ 0.23415184996489832,
22
+ 0.26424824721819157,
23
+ 0.24146053250543603,
24
+ 0.25488847476165793,
25
+ 0.2586300020690475,
26
+ 0.2837876822226263,
27
+ 0.2623867989475935
28
+ ]
29
+ }
30
+ }
31
+ ]
32
+ },
33
+ "task_name": "HALClusteringS2S.v2"
34
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/HagridRetrieval.json ADDED
@@ -0,0 +1,158 @@
1
+ {
2
+ "dataset_revision": "b2a085913606be3c4f2f1a8bff1810e38bade8fa",
3
+ "evaluation_time": 1.221498966217041,
4
+ "kg_co2_emissions": 5.0861501820132336e-05,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "dev": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "eng-Latn"
12
+ ],
13
+ "main_score": 0.98545,
14
+ "map_at_1": 0.97782,
15
+ "map_at_10": 0.98387,
16
+ "map_at_100": 0.98401,
17
+ "map_at_1000": 0.98405,
18
+ "map_at_20": 0.98401,
19
+ "map_at_3": 0.98387,
20
+ "map_at_5": 0.98387,
21
+ "mrr_at_1": 0.9778225806451613,
22
+ "mrr_at_10": 0.9838709677419355,
23
+ "mrr_at_100": 0.984005376344086,
24
+ "mrr_at_1000": 0.9840460258103384,
25
+ "mrr_at_20": 0.984005376344086,
26
+ "mrr_at_3": 0.9838709677419355,
27
+ "mrr_at_5": 0.9838709677419355,
28
+ "nauc_map_at_1000_diff1": 0.7596441414783949,
29
+ "nauc_map_at_1000_max": 0.4670698642569906,
30
+ "nauc_map_at_1000_std": 0.40792183127479115,
31
+ "nauc_map_at_100_diff1": 0.75984967274504,
32
+ "nauc_map_at_100_max": 0.46784490292569514,
33
+ "nauc_map_at_100_std": 0.40901314346987466,
34
+ "nauc_map_at_10_diff1": 0.7488222864439992,
35
+ "nauc_map_at_10_max": 0.4685629868590867,
36
+ "nauc_map_at_10_std": 0.4041729822269182,
37
+ "nauc_map_at_1_diff1": 0.817325299231999,
38
+ "nauc_map_at_1_max": 0.3258902798843844,
39
+ "nauc_map_at_1_std": 0.33150040853246854,
40
+ "nauc_map_at_20_diff1": 0.75984967274504,
41
+ "nauc_map_at_20_max": 0.46784490292569514,
42
+ "nauc_map_at_20_std": 0.40901314346987466,
43
+ "nauc_map_at_3_diff1": 0.7488222864439992,
44
+ "nauc_map_at_3_max": 0.4685629868590867,
45
+ "nauc_map_at_3_std": 0.4041729822269182,
46
+ "nauc_map_at_5_diff1": 0.7488222864439992,
47
+ "nauc_map_at_5_max": 0.4685629868590867,
48
+ "nauc_map_at_5_std": 0.4041729822269182,
49
+ "nauc_mrr_at_1000_diff1": 0.759640358929165,
50
+ "nauc_mrr_at_1000_max": 0.46706833746364207,
51
+ "nauc_mrr_at_1000_std": 0.407933343826773,
52
+ "nauc_mrr_at_100_diff1": 0.75984967274504,
53
+ "nauc_mrr_at_100_max": 0.46784490292569514,
54
+ "nauc_mrr_at_100_std": 0.40901314346987466,
55
+ "nauc_mrr_at_10_diff1": 0.7488222864439992,
56
+ "nauc_mrr_at_10_max": 0.4685629868590867,
57
+ "nauc_mrr_at_10_std": 0.4041729822269182,
58
+ "nauc_mrr_at_1_diff1": 0.817325299231999,
59
+ "nauc_mrr_at_1_max": 0.3258902798843844,
60
+ "nauc_mrr_at_1_std": 0.33150040853246854,
61
+ "nauc_mrr_at_20_diff1": 0.75984967274504,
62
+ "nauc_mrr_at_20_max": 0.46784490292569514,
63
+ "nauc_mrr_at_20_std": 0.40901314346987466,
64
+ "nauc_mrr_at_3_diff1": 0.7488222864439992,
65
+ "nauc_mrr_at_3_max": 0.4685629868590867,
66
+ "nauc_mrr_at_3_std": 0.4041729822269182,
67
+ "nauc_mrr_at_5_diff1": 0.7488222864439992,
68
+ "nauc_mrr_at_5_max": 0.4685629868590867,
69
+ "nauc_mrr_at_5_std": 0.4041729822269182,
70
+ "nauc_ndcg_at_1000_diff1": 0.7585426485621731,
71
+ "nauc_ndcg_at_1000_max": 0.500072731661684,
72
+ "nauc_ndcg_at_1000_std": 0.42969871015422434,
73
+ "nauc_ndcg_at_100_diff1": 0.7675955508173848,
74
+ "nauc_ndcg_at_100_max": 0.5245043534496843,
75
+ "nauc_ndcg_at_100_std": 0.45490581706468297,
76
+ "nauc_ndcg_at_10_diff1": 0.7214715394135851,
77
+ "nauc_ndcg_at_10_max": 0.5255269795039847,
78
+ "nauc_ndcg_at_10_std": 0.4331884823964825,
79
+ "nauc_ndcg_at_1_diff1": 0.817325299231999,
80
+ "nauc_ndcg_at_1_max": 0.3258902798843844,
81
+ "nauc_ndcg_at_1_std": 0.33150040853246854,
82
+ "nauc_ndcg_at_20_diff1": 0.7675955508173848,
83
+ "nauc_ndcg_at_20_max": 0.5245043534496843,
84
+ "nauc_ndcg_at_20_std": 0.45490581706468297,
85
+ "nauc_ndcg_at_3_diff1": 0.7214715394135851,
86
+ "nauc_ndcg_at_3_max": 0.5255269795039847,
87
+ "nauc_ndcg_at_3_std": 0.4331884823964825,
88
+ "nauc_ndcg_at_5_diff1": 0.7214715394135851,
89
+ "nauc_ndcg_at_5_max": 0.5255269795039847,
90
+ "nauc_ndcg_at_5_std": 0.4331884823964825,
91
+ "nauc_precision_at_1000_diff1": 1.0,
92
+ "nauc_precision_at_1000_max": 1.0,
93
+ "nauc_precision_at_1000_std": 1.0,
94
+ "nauc_precision_at_100_diff1": 0.8885037437330375,
95
+ "nauc_precision_at_100_max": 0.8395499340212191,
96
+ "nauc_precision_at_100_std": 0.7480173568647898,
97
+ "nauc_precision_at_10_diff1": 0.5981156583103825,
98
+ "nauc_precision_at_10_max": 0.7824429422034134,
99
+ "nauc_precision_at_10_std": 0.5640526443546937,
100
+ "nauc_precision_at_1_diff1": 0.817325299231999,
101
+ "nauc_precision_at_1_max": 0.3258902798843844,
102
+ "nauc_precision_at_1_std": 0.33150040853246854,
103
+ "nauc_precision_at_20_diff1": 0.8885037437330352,
104
+ "nauc_precision_at_20_max": 0.8395499340212462,
105
+ "nauc_precision_at_20_std": 0.748017356864805,
106
+ "nauc_precision_at_3_diff1": 0.5981156583104059,
107
+ "nauc_precision_at_3_max": 0.7824429422034322,
108
+ "nauc_precision_at_3_std": 0.5640526443547186,
109
+ "nauc_precision_at_5_diff1": 0.5981156583103825,
110
+ "nauc_precision_at_5_max": 0.7824429422034134,
111
+ "nauc_precision_at_5_std": 0.5640526443546937,
112
+ "nauc_recall_at_1000_diff1": NaN,
113
+ "nauc_recall_at_1000_max": NaN,
114
+ "nauc_recall_at_1000_std": NaN,
115
+ "nauc_recall_at_100_diff1": 0.8885037437330474,
116
+ "nauc_recall_at_100_max": 0.8395499340212359,
117
+ "nauc_recall_at_100_std": 0.748017356864806,
118
+ "nauc_recall_at_10_diff1": 0.5981156583104007,
119
+ "nauc_recall_at_10_max": 0.7824429422034165,
120
+ "nauc_recall_at_10_std": 0.5640526443547194,
121
+ "nauc_recall_at_1_diff1": 0.817325299231999,
122
+ "nauc_recall_at_1_max": 0.3258902798843844,
123
+ "nauc_recall_at_1_std": 0.33150040853246854,
124
+ "nauc_recall_at_20_diff1": 0.8885037437330474,
125
+ "nauc_recall_at_20_max": 0.8395499340212359,
126
+ "nauc_recall_at_20_std": 0.748017356864806,
127
+ "nauc_recall_at_3_diff1": 0.5981156583104007,
128
+ "nauc_recall_at_3_max": 0.7824429422034165,
129
+ "nauc_recall_at_3_std": 0.5640526443547194,
130
+ "nauc_recall_at_5_diff1": 0.5981156583104007,
131
+ "nauc_recall_at_5_max": 0.7824429422034165,
132
+ "nauc_recall_at_5_std": 0.5640526443547194,
133
+ "ndcg_at_1": 0.97782,
134
+ "ndcg_at_10": 0.98545,
135
+ "ndcg_at_100": 0.98596,
136
+ "ndcg_at_1000": 0.98701,
137
+ "ndcg_at_20": 0.98596,
138
+ "ndcg_at_3": 0.98545,
139
+ "ndcg_at_5": 0.98545,
140
+ "precision_at_1": 0.97782,
141
+ "precision_at_10": 0.09899,
142
+ "precision_at_100": 0.00992,
143
+ "precision_at_1000": 0.001,
144
+ "precision_at_20": 0.0496,
145
+ "precision_at_3": 0.32997,
146
+ "precision_at_5": 0.19798,
147
+ "recall_at_1": 0.97782,
148
+ "recall_at_10": 0.98992,
149
+ "recall_at_100": 0.99194,
150
+ "recall_at_1000": 1.0,
151
+ "recall_at_20": 0.99194,
152
+ "recall_at_3": 0.98992,
153
+ "recall_at_5": 0.98992
154
+ }
155
+ ]
156
+ },
157
+ "task_name": "HagridRetrieval"
158
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/HotpotQA-PLHardNegatives.json ADDED
@@ -0,0 +1,158 @@
1
+ {
2
+ "dataset_revision": "0642cadffa3205c6b21c9af901fdffcd60d6f31e",
3
+ "evaluation_time": 222.54219388961792,
4
+ "kg_co2_emissions": 0.018523006984017196,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "pol-Latn"
12
+ ],
13
+ "main_score": 0.58222,
14
+ "map_at_1": 0.3605,
15
+ "map_at_10": 0.48535,
16
+ "map_at_100": 0.49608,
17
+ "map_at_1000": 0.49705,
18
+ "map_at_20": 0.4913,
19
+ "map_at_3": 0.4575,
20
+ "map_at_5": 0.47297,
21
+ "mrr_at_1": 0.721,
22
+ "mrr_at_10": 0.7797908730158735,
23
+ "mrr_at_100": 0.7833174366817578,
24
+ "mrr_at_1000": 0.783449695242909,
25
+ "mrr_at_20": 0.7820251827377991,
26
+ "mrr_at_3": 0.7673333333333336,
27
+ "mrr_at_5": 0.7753833333333339,
28
+ "nauc_map_at_1000_diff1": 0.2667180057565436,
29
+ "nauc_map_at_1000_max": 0.267085868514929,
30
+ "nauc_map_at_1000_std": 0.014638865880404936,
31
+ "nauc_map_at_100_diff1": 0.26676764467629965,
32
+ "nauc_map_at_100_max": 0.2672158722013806,
33
+ "nauc_map_at_100_std": 0.014226105367284365,
34
+ "nauc_map_at_10_diff1": 0.2753225303757146,
35
+ "nauc_map_at_10_max": 0.2691078419732798,
36
+ "nauc_map_at_10_std": 0.001259006514719525,
37
+ "nauc_map_at_1_diff1": 0.7291303507566783,
38
+ "nauc_map_at_1_max": 0.5594522141622957,
39
+ "nauc_map_at_1_std": -0.03159699433424003,
40
+ "nauc_map_at_20_diff1": 0.26913065816828574,
41
+ "nauc_map_at_20_max": 0.2666086488544034,
42
+ "nauc_map_at_20_std": 0.00858559776096872,
43
+ "nauc_map_at_3_diff1": 0.30920385771061676,
44
+ "nauc_map_at_3_max": 0.28759418323475217,
45
+ "nauc_map_at_3_std": -0.017513981636210922,
46
+ "nauc_map_at_5_diff1": 0.28279436421879267,
47
+ "nauc_map_at_5_max": 0.2744202174707723,
48
+ "nauc_map_at_5_std": -0.002603492783359323,
49
+ "nauc_mrr_at_1000_diff1": 0.6966690218447221,
50
+ "nauc_mrr_at_1000_max": 0.5528034450058595,
51
+ "nauc_mrr_at_1000_std": -0.021894637008817587,
52
+ "nauc_mrr_at_100_diff1": 0.69661451376762,
53
+ "nauc_mrr_at_100_max": 0.5528960609171659,
54
+ "nauc_mrr_at_100_std": -0.021686341005405365,
55
+ "nauc_mrr_at_10_diff1": 0.6942141064589518,
56
+ "nauc_mrr_at_10_max": 0.5503439982169952,
57
+ "nauc_mrr_at_10_std": -0.0232779345413061,
58
+ "nauc_mrr_at_1_diff1": 0.7291303507566783,
59
+ "nauc_mrr_at_1_max": 0.5594522141622957,
60
+ "nauc_mrr_at_1_std": -0.03159699433424003,
61
+ "nauc_mrr_at_20_diff1": 0.6962940350534658,
62
+ "nauc_mrr_at_20_max": 0.5523156260244149,
63
+ "nauc_mrr_at_20_std": -0.023000518601048996,
64
+ "nauc_mrr_at_3_diff1": 0.6979765317217133,
65
+ "nauc_mrr_at_3_max": 0.550178327106686,
66
+ "nauc_mrr_at_3_std": -0.028010993184255508,
67
+ "nauc_mrr_at_5_diff1": 0.6958659341499192,
68
+ "nauc_mrr_at_5_max": 0.5517078085185242,
69
+ "nauc_mrr_at_5_std": -0.02151275221462315,
70
+ "nauc_ndcg_at_1000_diff1": 0.2916646723060265,
71
+ "nauc_ndcg_at_1000_max": 0.30008011784526073,
72
+ "nauc_ndcg_at_1000_std": 0.06657720883818576,
73
+ "nauc_ndcg_at_100_diff1": 0.28665379070336217,
74
+ "nauc_ndcg_at_100_max": 0.29956478989333346,
75
+ "nauc_ndcg_at_100_std": 0.06166541405706987,
76
+ "nauc_ndcg_at_10_diff1": 0.3185569767875872,
77
+ "nauc_ndcg_at_10_max": 0.30368581160340996,
78
+ "nauc_ndcg_at_10_std": 0.00913167994814244,
79
+ "nauc_ndcg_at_1_diff1": 0.7291303507566783,
80
+ "nauc_ndcg_at_1_max": 0.5594522141622957,
81
+ "nauc_ndcg_at_1_std": -0.03159699433424003,
82
+ "nauc_ndcg_at_20_diff1": 0.302053878150126,
83
+ "nauc_ndcg_at_20_max": 0.2980603197164483,
84
+ "nauc_ndcg_at_20_std": 0.02766999811500463,
85
+ "nauc_ndcg_at_3_diff1": 0.3764811937581994,
86
+ "nauc_ndcg_at_3_max": 0.3352896453598404,
87
+ "nauc_ndcg_at_3_std": -0.01880594463710868,
88
+ "nauc_ndcg_at_5_diff1": 0.33707969496121387,
89
+ "nauc_ndcg_at_5_max": 0.31625034197419205,
90
+ "nauc_ndcg_at_5_std": 0.002532516144258022,
91
+ "nauc_precision_at_1000_diff1": -0.267439503701553,
92
+ "nauc_precision_at_1000_max": -0.008475018962230425,
93
+ "nauc_precision_at_1000_std": 0.4642141488519215,
94
+ "nauc_precision_at_100_diff1": -0.09841972219126524,
95
+ "nauc_precision_at_100_max": 0.097709243067446,
96
+ "nauc_precision_at_100_std": 0.2795551716801247,
97
+ "nauc_precision_at_10_diff1": 0.098995200477775,
98
+ "nauc_precision_at_10_max": 0.15989697979514156,
99
+ "nauc_precision_at_10_std": 0.04571832641935291,
100
+ "nauc_precision_at_1_diff1": 0.7291303507566783,
101
+ "nauc_precision_at_1_max": 0.5594522141622957,
102
+ "nauc_precision_at_1_std": -0.03159699433424003,
103
+ "nauc_precision_at_20_diff1": 0.03726352202748027,
104
+ "nauc_precision_at_20_max": 0.1358729249280041,
105
+ "nauc_precision_at_20_std": 0.10463941619046158,
106
+ "nauc_precision_at_3_diff1": 0.23556370199844162,
107
+ "nauc_precision_at_3_max": 0.24289326119476565,
108
+ "nauc_precision_at_3_std": -0.013637951575194185,
109
+ "nauc_precision_at_5_diff1": 0.1567194635254898,
110
+ "nauc_precision_at_5_max": 0.20361909794268396,
111
+ "nauc_precision_at_5_std": 0.028770851322498583,
112
+ "nauc_recall_at_1000_diff1": -0.2674395037015512,
113
+ "nauc_recall_at_1000_max": -0.008475018962227633,
114
+ "nauc_recall_at_1000_std": 0.4642141488519231,
115
+ "nauc_recall_at_100_diff1": -0.0984197221912639,
116
+ "nauc_recall_at_100_max": 0.097709243067446,
117
+ "nauc_recall_at_100_std": 0.27955517168012484,
118
+ "nauc_recall_at_10_diff1": 0.09899520047777512,
119
+ "nauc_recall_at_10_max": 0.15989697979514092,
120
+ "nauc_recall_at_10_std": 0.045718326419352605,
121
+ "nauc_recall_at_1_diff1": 0.7291303507566783,
122
+ "nauc_recall_at_1_max": 0.5594522141622957,
123
+ "nauc_recall_at_1_std": -0.03159699433424003,
124
+ "nauc_recall_at_20_diff1": 0.037263522027480435,
125
+ "nauc_recall_at_20_max": 0.13587292492800443,
126
+ "nauc_recall_at_20_std": 0.10463941619046209,
127
+ "nauc_recall_at_3_diff1": 0.23556370199844193,
128
+ "nauc_recall_at_3_max": 0.24289326119476531,
129
+ "nauc_recall_at_3_std": -0.013637951575194672,
130
+ "nauc_recall_at_5_diff1": 0.15671946352548985,
131
+ "nauc_recall_at_5_max": 0.2036190979426835,
132
+ "nauc_recall_at_5_std": 0.02877085132249882,
133
+ "ndcg_at_1": 0.721,
134
+ "ndcg_at_10": 0.58222,
135
+ "ndcg_at_100": 0.62249,
136
+ "ndcg_at_1000": 0.64143,
137
+ "ndcg_at_20": 0.59823,
138
+ "ndcg_at_3": 0.53909,
139
+ "ndcg_at_5": 0.5606,
140
+ "precision_at_1": 0.721,
141
+ "precision_at_10": 0.1183,
142
+ "precision_at_100": 0.01501,
143
+ "precision_at_1000": 0.00175,
144
+ "precision_at_20": 0.06435,
145
+ "precision_at_3": 0.32967,
146
+ "precision_at_5": 0.2148,
147
+ "recall_at_1": 0.3605,
148
+ "recall_at_10": 0.5915,
149
+ "recall_at_100": 0.7505,
150
+ "recall_at_1000": 0.8755,
151
+ "recall_at_20": 0.6435,
152
+ "recall_at_3": 0.4945,
153
+ "recall_at_5": 0.537
154
+ }
155
+ ]
156
+ },
157
+ "task_name": "HotpotQA-PLHardNegatives"
158
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/HotpotQAHardNegatives.json ADDED
@@ -0,0 +1,158 @@
1
+ {
2
+ "dataset_revision": "617612fa63afcb60e3b134bed8b7216a99707c37",
3
+ "evaluation_time": 199.39971446990967,
4
+ "kg_co2_emissions": 0.016100751439392522,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "hf_subset": "default",
10
+ "languages": [
11
+ "eng-Latn"
12
+ ],
13
+ "main_score": 0.64118,
14
+ "map_at_1": 0.405,
15
+ "map_at_10": 0.5426,
16
+ "map_at_100": 0.55427,
17
+ "map_at_1000": 0.55518,
18
+ "map_at_20": 0.54905,
19
+ "map_at_3": 0.514,
20
+ "map_at_5": 0.53155,
21
+ "mrr_at_1": 0.81,
22
+ "mrr_at_10": 0.8578654761904764,
23
+ "mrr_at_100": 0.8600703481054663,
24
+ "mrr_at_1000": 0.860137119135413,
25
+ "mrr_at_20": 0.8593080416917107,
26
+ "mrr_at_3": 0.8496666666666669,
27
+ "mrr_at_5": 0.8547666666666669,
28
+ "nauc_map_at_1000_diff1": 0.155587601517561,
29
+ "nauc_map_at_1000_max": 0.17934313701988794,
30
+ "nauc_map_at_1000_std": -0.019616850132664305,
31
+ "nauc_map_at_100_diff1": 0.154931982823591,
32
+ "nauc_map_at_100_max": 0.17891069637988444,
33
+ "nauc_map_at_100_std": -0.019915809499914294,
34
+ "nauc_map_at_10_diff1": 0.16324544030723376,
35
+ "nauc_map_at_10_max": 0.18173808602641572,
36
+ "nauc_map_at_10_std": -0.03107559774735843,
37
+ "nauc_map_at_1_diff1": 0.7602886479124105,
38
+ "nauc_map_at_1_max": 0.6259547383309763,
39
+ "nauc_map_at_1_std": -0.10081984388915124,
40
+ "nauc_map_at_20_diff1": 0.1562896440553918,
41
+ "nauc_map_at_20_max": 0.17788371954637822,
42
+ "nauc_map_at_20_std": -0.027691691903463937,
43
+ "nauc_map_at_3_diff1": 0.19054270978600454,
44
+ "nauc_map_at_3_max": 0.2134148185124997,
45
+ "nauc_map_at_3_std": -0.05090775449341879,
46
+ "nauc_map_at_5_diff1": 0.16927636084124592,
47
+ "nauc_map_at_5_max": 0.19035149324414186,
48
+ "nauc_map_at_5_std": -0.03279546529107175,
49
+ "nauc_mrr_at_1000_diff1": 0.7556767658193468,
50
+ "nauc_mrr_at_1000_max": 0.6448211461093355,
51
+ "nauc_mrr_at_1000_std": -0.11927523216436862,
52
+ "nauc_mrr_at_100_diff1": 0.7557423980784244,
53
+ "nauc_mrr_at_100_max": 0.644955807815918,
54
+ "nauc_mrr_at_100_std": -0.11922972279265914,
55
+ "nauc_mrr_at_10_diff1": 0.7555042563638119,
56
+ "nauc_mrr_at_10_max": 0.6472044558693102,
57
+ "nauc_mrr_at_10_std": -0.11812608153973177,
58
+ "nauc_mrr_at_1_diff1": 0.7602886479124105,
59
+ "nauc_mrr_at_1_max": 0.6259547383309763,
60
+ "nauc_mrr_at_1_std": -0.10081984388915124,
61
+ "nauc_mrr_at_20_diff1": 0.7560463172156042,
62
+ "nauc_mrr_at_20_max": 0.6460255721882858,
63
+ "nauc_mrr_at_20_std": -0.12114243004081665,
64
+ "nauc_mrr_at_3_diff1": 0.755620495322388,
65
+ "nauc_mrr_at_3_max": 0.6479737205037651,
66
+ "nauc_mrr_at_3_std": -0.12437727466598479,
67
+ "nauc_mrr_at_5_diff1": 0.755252188276229,
68
+ "nauc_mrr_at_5_max": 0.6453620524979882,
69
+ "nauc_mrr_at_5_std": -0.12192826326489287,
70
+ "nauc_ndcg_at_1000_diff1": 0.20090273457671762,
71
+ "nauc_ndcg_at_1000_max": 0.22081169055619984,
72
+ "nauc_ndcg_at_1000_std": 0.017069601140913594,
73
+ "nauc_ndcg_at_100_diff1": 0.179231535500462,
74
+ "nauc_ndcg_at_100_max": 0.2084222810485699,
75
+ "nauc_ndcg_at_100_std": 0.019633126770365966,
76
+ "nauc_ndcg_at_10_diff1": 0.2123786983410829,
77
+ "nauc_ndcg_at_10_max": 0.22189958364205634,
78
+ "nauc_ndcg_at_10_std": -0.03319479999775504,
79
+ "nauc_ndcg_at_1_diff1": 0.7602886479124105,
80
+ "nauc_ndcg_at_1_max": 0.6259547383309763,
81
+ "nauc_ndcg_at_1_std": -0.10081984388915124,
82
+ "nauc_ndcg_at_20_diff1": 0.19207681796996898,
83
+ "nauc_ndcg_at_20_max": 0.20915694949123118,
84
+ "nauc_ndcg_at_20_std": -0.026787998286494485,
85
+ "nauc_ndcg_at_3_diff1": 0.26512028841779756,
86
+ "nauc_ndcg_at_3_max": 0.27706297508196925,
87
+ "nauc_ndcg_at_3_std": -0.06416632823695793,
88
+ "nauc_ndcg_at_5_diff1": 0.22973562967550976,
89
+ "nauc_ndcg_at_5_max": 0.24022382854880628,
90
+ "nauc_ndcg_at_5_std": -0.040531311626606385,
91
+ "nauc_precision_at_1000_diff1": -0.22181080233960596,
92
+ "nauc_precision_at_1000_max": -0.1027667249265575,
93
+ "nauc_precision_at_1000_std": 0.4059271025356315,
94
+ "nauc_precision_at_100_diff1": -0.2075884868166366,
95
+ "nauc_precision_at_100_max": -0.06282932843659347,
96
+ "nauc_precision_at_100_std": 0.2664545524931444,
97
+ "nauc_precision_at_10_diff1": -0.004222154133206406,
98
+ "nauc_precision_at_10_max": 0.04812432546143043,
99
+ "nauc_precision_at_10_std": 0.012756781970841648,
100
+ "nauc_precision_at_1_diff1": 0.7602886479124105,
101
+ "nauc_precision_at_1_max": 0.6259547383309763,
102
+ "nauc_precision_at_1_std": -0.10081984388915124,
103
+ "nauc_precision_at_20_diff1": -0.07533276324713226,
104
+ "nauc_precision_at_20_max": 0.0003220004128206769,
105
+ "nauc_precision_at_20_std": 0.033315837584406885,
106
+ "nauc_precision_at_3_diff1": 0.10477173408616292,
107
+ "nauc_precision_at_3_max": 0.16437369479464758,
108
+ "nauc_precision_at_3_std": -0.04928358728439993,
109
+ "nauc_precision_at_5_diff1": 0.03907259061407471,
110
+ "nauc_precision_at_5_max": 0.0929893229389289,
111
+ "nauc_precision_at_5_std": -0.006490340737112654,
112
+ "nauc_recall_at_1000_diff1": -0.2218108023396033,
113
+ "nauc_recall_at_1000_max": -0.10276672492655371,
114
+ "nauc_recall_at_1000_std": 0.4059271025356344,
115
+ "nauc_recall_at_100_diff1": -0.2075884868166367,
116
+ "nauc_recall_at_100_max": -0.0628293284365931,
117
+ "nauc_recall_at_100_std": 0.2664545524931447,
118
+ "nauc_recall_at_10_diff1": -0.004222154133205925,
119
+ "nauc_recall_at_10_max": 0.0481243254614309,
120
+ "nauc_recall_at_10_std": 0.012756781970841407,
121
+ "nauc_recall_at_1_diff1": 0.7602886479124105,
122
+ "nauc_recall_at_1_max": 0.6259547383309763,
123
+ "nauc_recall_at_1_std": -0.10081984388915124,
124
+ "nauc_recall_at_20_diff1": -0.07533276324713266,
125
+ "nauc_recall_at_20_max": 0.0003220004128211715,
126
+ "nauc_recall_at_20_std": 0.03331583758440742,
127
+ "nauc_recall_at_3_diff1": 0.1047717340861625,
128
+ "nauc_recall_at_3_max": 0.16437369479464706,
129
+ "nauc_recall_at_3_std": -0.049283587284399466,
130
+ "nauc_recall_at_5_diff1": 0.039072590614075316,
131
+ "nauc_recall_at_5_max": 0.09298932293892978,
132
+ "nauc_recall_at_5_std": -0.006490340737112728,
133
+ "ndcg_at_1": 0.81,
134
+ "ndcg_at_10": 0.64118,
135
+ "ndcg_at_100": 0.68206,
136
+ "ndcg_at_1000": 0.69842,
137
+ "ndcg_at_20": 0.65716,
138
+ "ndcg_at_3": 0.60017,
139
+ "ndcg_at_5": 0.62239,
140
+ "precision_at_1": 0.81,
141
+ "precision_at_10": 0.1277,
142
+ "precision_at_100": 0.01599,
143
+ "precision_at_1000": 0.00181,
144
+ "precision_at_20": 0.069,
145
+ "precision_at_3": 0.36467,
146
+ "precision_at_5": 0.2364,
147
+ "recall_at_1": 0.405,
148
+ "recall_at_10": 0.6385,
149
+ "recall_at_100": 0.7995,
150
+ "recall_at_1000": 0.9065,
151
+ "recall_at_20": 0.69,
152
+ "recall_at_3": 0.547,
153
+ "recall_at_5": 0.591
154
+ }
155
+ ]
156
+ },
157
+ "task_name": "HotpotQAHardNegatives"
158
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/IN22GenBitextMining.json ADDED
The diff for this file is too large to render. See raw diff
 
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/IndicCrosslingualSTS.json ADDED
@@ -0,0 +1,203 @@
1
+ {
2
+ "dataset_revision": "0ca7b87dda68ef4ebb2f50a20a62b9dbebcac3e4",
3
+ "evaluation_time": 3.285512924194336,
4
+ "kg_co2_emissions": 0.00017477721651682455,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "cosine_pearson": 0.5610533134292232,
10
+ "cosine_spearman": 0.535873274413269,
11
+ "euclidean_pearson": 0.5491189171052261,
12
+ "euclidean_spearman": 0.535873274413269,
13
+ "hf_subset": "en-ml",
14
+ "languages": [
15
+ "eng-Latn",
16
+ "mal-Mlym"
17
+ ],
18
+ "main_score": 0.535873274413269,
19
+ "manhattan_pearson": 0.5505778248278909,
20
+ "manhattan_spearman": 0.5367856577620813,
21
+ "pearson": 0.5610533134292232,
22
+ "spearman": 0.535873274413269
23
+ },
24
+ {
25
+ "cosine_pearson": 0.5260025132435664,
26
+ "cosine_spearman": 0.5157195164235536,
27
+ "euclidean_pearson": 0.5233474053004464,
28
+ "euclidean_spearman": 0.5157195164235536,
29
+ "hf_subset": "en-pa",
30
+ "languages": [
31
+ "eng-Latn",
32
+ "pan-Guru"
33
+ ],
34
+ "main_score": 0.5157195164235536,
35
+ "manhattan_pearson": 0.5195858258299345,
36
+ "manhattan_spearman": 0.5111684174152716,
37
+ "pearson": 0.5260025132435664,
38
+ "spearman": 0.5157195164235536
39
+ },
40
+ {
41
+ "cosine_pearson": 0.032390643812766455,
42
+ "cosine_spearman": 0.04544392051012095,
43
+ "euclidean_pearson": 0.033479099156466706,
44
+ "euclidean_spearman": 0.04544392051012095,
45
+ "hf_subset": "en-or",
46
+ "languages": [
47
+ "eng-Latn",
48
+ "ory-Orya"
49
+ ],
50
+ "main_score": 0.04544392051012095,
51
+ "manhattan_pearson": 0.026513700628785983,
52
+ "manhattan_spearman": 0.03996007979330212,
53
+ "pearson": 0.032390643812766455,
54
+ "spearman": 0.04544392051012095
55
+ },
56
+ {
57
+ "cosine_pearson": 0.13605259611050857,
58
+ "cosine_spearman": 0.13833989627303,
59
+ "euclidean_pearson": 0.1421977354780135,
60
+ "euclidean_spearman": 0.13833989627303,
61
+ "hf_subset": "en-ur",
62
+ "languages": [
63
+ "eng-Latn",
64
+ "urd-Arab"
65
+ ],
66
+ "main_score": 0.13833989627303,
67
+ "manhattan_pearson": 0.14300362263186167,
68
+ "manhattan_spearman": 0.13891860507995377,
69
+ "pearson": 0.13605259611050857,
70
+ "spearman": 0.13833989627303
71
+ },
72
+ {
73
+ "cosine_pearson": 0.45930500802649765,
74
+ "cosine_spearman": 0.4173791661382292,
75
+ "euclidean_pearson": 0.4487847431582519,
76
+ "euclidean_spearman": 0.4173791661382292,
77
+ "hf_subset": "en-ta",
78
+ "languages": [
79
+ "eng-Latn",
80
+ "tam-Taml"
81
+ ],
82
+ "main_score": 0.4173791661382292,
83
+ "manhattan_pearson": 0.4462453349413776,
84
+ "manhattan_spearman": 0.4144978937577361,
85
+ "pearson": 0.45930500802649765,
86
+ "spearman": 0.4173791661382292
87
+ },
88
+ {
89
+ "cosine_pearson": 0.6459078982139285,
90
+ "cosine_spearman": 0.6221961739969121,
91
+ "euclidean_pearson": 0.6321890015160866,
92
+ "euclidean_spearman": 0.6221961739969121,
93
+ "hf_subset": "en-hi",
94
+ "languages": [
95
+ "eng-Latn",
96
+ "hin-Deva"
97
+ ],
98
+ "main_score": 0.6221961739969121,
99
+ "manhattan_pearson": 0.6296181060948292,
100
+ "manhattan_spearman": 0.6209087117399212,
101
+ "pearson": 0.6459078982139285,
102
+ "spearman": 0.6221961739969121
103
+ },
104
+ {
105
+ "cosine_pearson": 0.4479874605869105,
106
+ "cosine_spearman": 0.44934960947182584,
107
+ "euclidean_pearson": 0.43655293068251144,
108
+ "euclidean_spearman": 0.44934960947182584,
109
+ "hf_subset": "en-kn",
110
+ "languages": [
111
+ "eng-Latn",
112
+ "kan-Knda"
113
+ ],
114
+ "main_score": 0.44934960947182584,
115
+ "manhattan_pearson": 0.4332296313423368,
116
+ "manhattan_spearman": 0.44515620882456924,
117
+ "pearson": 0.4479874605869105,
118
+ "spearman": 0.44934960947182584
119
+ },
120
+ {
121
+ "cosine_pearson": 0.4210160669148245,
122
+ "cosine_spearman": 0.419670827675749,
123
+ "euclidean_pearson": 0.4168769718987576,
124
+ "euclidean_spearman": 0.419670827675749,
125
+ "hf_subset": "en-te",
126
+ "languages": [
127
+ "eng-Latn",
128
+ "tel-Telu"
129
+ ],
130
+ "main_score": 0.419670827675749,
131
+ "manhattan_pearson": 0.4077340245544866,
132
+ "manhattan_spearman": 0.4096029168240839,
133
+ "pearson": 0.4210160669148245,
134
+ "spearman": 0.419670827675749
135
+ },
136
+ {
137
+ "cosine_pearson": 0.46574540290034966,
138
+ "cosine_spearman": 0.4199000999632796,
139
+ "euclidean_pearson": 0.44150812503863923,
140
+ "euclidean_spearman": 0.4199000999632796,
141
+ "hf_subset": "en-bn",
142
+ "languages": [
143
+ "eng-Latn",
144
+ "ben-Beng"
145
+ ],
146
+ "main_score": 0.4199000999632796,
147
+ "manhattan_pearson": 0.43499002636950085,
148
+ "manhattan_spearman": 0.4098834035327296,
149
+ "pearson": 0.46574540290034966,
150
+ "spearman": 0.4199000999632796
151
+ },
152
+ {
153
+ "cosine_pearson": 0.487289344594788,
154
+ "cosine_spearman": 0.4928978492017074,
155
+ "euclidean_pearson": 0.48189694988537274,
156
+ "euclidean_spearman": 0.4928978492017074,
157
+ "hf_subset": "en-gu",
158
+ "languages": [
159
+ "eng-Latn",
160
+ "guj-Gujr"
161
+ ],
162
+ "main_score": 0.4928978492017074,
163
+ "manhattan_pearson": 0.47639786701647396,
164
+ "manhattan_spearman": 0.4905890214955381,
165
+ "pearson": 0.487289344594788,
166
+ "spearman": 0.4928978492017074
167
+ },
168
+ {
169
+ "cosine_pearson": 0.43280152492103896,
170
+ "cosine_spearman": 0.4245750322386225,
171
+ "euclidean_pearson": 0.4177386178642453,
172
+ "euclidean_spearman": 0.42457495631752523,
173
+ "hf_subset": "en-mr",
174
+ "languages": [
175
+ "eng-Latn",
176
+ "mar-Deva"
177
+ ],
178
+ "main_score": 0.4245750322386225,
179
+ "manhattan_pearson": 0.414210022549917,
180
+ "manhattan_spearman": 0.4254818244417916,
181
+ "pearson": 0.43280152492103896,
182
+ "spearman": 0.4245750322386225
183
+ },
184
+ {
185
+ "cosine_pearson": 0.4877297997006108,
186
+ "cosine_spearman": 0.48255590060465614,
187
+ "euclidean_pearson": 0.48272721174748323,
188
+ "euclidean_spearman": 0.48255590060465614,
189
+ "hf_subset": "en-as",
190
+ "languages": [
191
+ "eng-Latn",
192
+ "asm-Beng"
193
+ ],
194
+ "main_score": 0.48255590060465614,
195
+ "manhattan_pearson": 0.4798234827073941,
196
+ "manhattan_spearman": 0.47894125371264695,
197
+ "pearson": 0.4877297997006108,
198
+ "spearman": 0.48255590060465614
199
+ }
200
+ ]
201
+ },
202
+ "task_name": "IndicCrosslingualSTS"
203
+ }
results/Alibaba-NLP__gte-multilingual-base/7fc06782350c1a83f88b15dd4b38ef853d3b8503/IndicGenBenchFloresBitextMining.json ADDED
@@ -0,0 +1,1405 @@
1
+ {
2
+ "dataset_revision": "f8650438298df086750ff4973661bb58a201a5ee",
3
+ "evaluation_time": 149.4014880657196,
4
+ "kg_co2_emissions": 0.008644489616315428,
5
+ "mteb_version": "1.12.75",
6
+ "scores": {
7
+ "test": [
8
+ {
9
+ "accuracy": 0.9950592885375494,
10
+ "f1": 0.9934123847167324,
11
+ "hf_subset": "ben-eng",
12
+ "languages": [
13
+ "ben-Beng",
14
+ "eng-Latn"
15
+ ],
16
+ "main_score": 0.9934123847167324,
17
+ "precision": 0.9925889328063241,
18
+ "recall": 0.9950592885375494
19
+ },
20
+ {
21
+ "accuracy": 0.9861660079051383,
22
+ "f1": 0.9818840579710143,
23
+ "hf_subset": "eng-ben",
24
+ "languages": [
25
+ "eng-Latn",
26
+ "ben-Beng"
27
+ ],
28
+ "main_score": 0.9818840579710143,
29
+ "precision": 0.9799077733860342,
30
+ "recall": 0.9861660079051383
31
+ },
32
+ {
33
+ "accuracy": 0.9950592885375494,
34
+ "f1": 0.9934123847167324,
35
+ "hf_subset": "guj-eng",
36
+ "languages": [
37
+ "guj-Gujr",
38
+ "eng-Latn"
39
+ ],
40
+ "main_score": 0.9934123847167324,
41
+ "precision": 0.9925889328063241,
42
+ "recall": 0.9950592885375494
43
+ },
44
+ {
45
+ "accuracy": 0.9871541501976284,
46
+ "f1": 0.9834321475625823,
47
+ "hf_subset": "eng-guj",
48
+ "languages": [
49
+ "eng-Latn",
50
+ "guj-Gujr"
51
+ ],
52
+ "main_score": 0.9834321475625823,
53
+ "precision": 0.9818017127799736,
54
+ "recall": 0.9871541501976284
55
+ },
56
+ {
57
+ "accuracy": 1.0,
58
+ "f1": 1.0,
59
+ "hf_subset": "hin-eng",
60
+ "languages": [
61
+ "hin-Deva",
62
+ "eng-Latn"
63
+ ],
64
+ "main_score": 1.0,
65
+ "precision": 1.0,
66
+ "recall": 1.0
67
+ },
68
+ {
69
+ "accuracy": 0.9950592885375494,
70
+ "f1": 0.9934123847167324,
71
+ "hf_subset": "eng-hin",
72
+ "languages": [
73
+ "eng-Latn",
74
+ "hin-Deva"
75
+ ],
76
+ "main_score": 0.9934123847167324,
77
+ "precision": 0.9925889328063241,
78
+ "recall": 0.9950592885375494
79
+ },
80
+ {
81
+ "accuracy": 0.9930830039525692,
82
+ "f1": 0.9907773386034255,
83
+ "hf_subset": "kan-eng",
84
+ "languages": [
85
+ "kan-Knda",
86
+ "eng-Latn"
87
+ ],
88
+ "main_score": 0.9907773386034255,
89
+ "precision": 0.9896245059288538,
90
+ "recall": 0.9930830039525692
91
+ },
92
+ {
93
+ "accuracy": 0.983201581027668,
94
+ "f1": 0.9777667984189723,
95
+ "hf_subset": "eng-kan",
96
+ "languages": [
97
+ "eng-Latn",
98
+ "kan-Knda"
99
+ ],
100
+ "main_score": 0.9777667984189723,
101
+ "precision": 0.9751317523056654,
102
+ "recall": 0.983201581027668
103
+ },
104
+ {
105
+ "accuracy": 0.9782608695652174,
106
+ "f1": 0.972068511198946,
107
+ "hf_subset": "mal-eng",
108
+ "languages": [
109
+ "mal-Mlym",
110
+ "eng-Latn"
111
+ ],
112
+ "main_score": 0.972068511198946,
113
+ "precision": 0.9692852437417655,
114
+ "recall": 0.9782608695652174
115
+ },
116
+ {
117
+ "accuracy": 0.9644268774703557,
118
+ "f1": 0.954298418972332,
119
+ "hf_subset": "eng-mal",
120
+ "languages": [
121
+ "eng-Latn",
122
+ "mal-Mlym"
123
+ ],
124
+ "main_score": 0.954298418972332,
125
+ "precision": 0.9497459062676453,
126
+ "recall": 0.9644268774703557
127
+ },
128
+ {
129
+ "accuracy": 0.9970355731225297,
130
+ "f1": 0.9960474308300395,
131
+ "hf_subset": "mar-eng",
132
+ "languages": [
133
+ "mar-Deva",
134
+ "eng-Latn"
135
+ ],
136
+ "main_score": 0.9960474308300395,
137
+ "precision": 0.9955533596837944,
138
+ "recall": 0.9970355731225297
139
+ },
140
+ {
141
+ "accuracy": 0.991106719367589,
142
+ "f1": 0.9888010540184454,
143
+ "hf_subset": "eng-mar",
144
+ "languages": [
145
+ "eng-Latn",
146
+ "mar-Deva"
147
+ ],
148
+ "main_score": 0.9888010540184454,
149
+ "precision": 0.9878458498023716,
150
+ "recall": 0.991106719367589
151
+ },
152
+ {
153
+ "accuracy": 0.9960474308300395,
154
+ "f1": 0.994729907773386,
155
+ "hf_subset": "tam-eng",
156
+ "languages": [
157
+ "tam-Taml",
158
+ "eng-Latn"
159
+ ],
160
+ "main_score": 0.994729907773386,
161
+ "precision": 0.9940711462450593,
162
+ "recall": 0.9960474308300395
163
+ },
164
+ {
165
+ "accuracy": 0.9881422924901185,
166
+ "f1": 0.9843544137022397,
167
+ "hf_subset": "eng-tam",
168
+ "languages": [
169
+ "eng-Latn",
170
+ "tam-Taml"
171
+ ],
172
+ "main_score": 0.9843544137022397,
173
+ "precision": 0.9825428194993412,
174
+ "recall": 0.9881422924901185
175
+ },
176
+ {
177
+ "accuracy": 0.9980237154150198,
178
+ "f1": 0.9973649538866931,
179
+ "hf_subset": "tel-eng",
180
+ "languages": [
181
+ "tel-Telu",
182
+ "eng-Latn"
183
+ ],
184
+ "main_score": 0.9973649538866931,
185
+ "precision": 0.9970355731225297,
186
+ "recall": 0.9980237154150198
187
+ },
188
+ {
189
+ "accuracy": 0.9881422924901185,
190
+ "f1": 0.9843544137022399,
191
+ "hf_subset": "eng-tel",
192
+ "languages": [
193
+ "eng-Latn",
194
+ "tel-Telu"
195
+ ],
196
+ "main_score": 0.9843544137022399,
197
+ "precision": 0.9825428194993413,
198
+ "recall": 0.9881422924901185
199
+ },
200
+ {
201
+ "accuracy": 0.9970355731225297,
202
+ "f1": 0.9960474308300395,
203
+ "hf_subset": "urd-eng",
204
+ "languages": [
205
+ "urd-Arab",
206
+ "eng-Latn"
207
+ ],
208
+ "main_score": 0.9960474308300395,
209
+ "precision": 0.9955533596837944,
210
+ "recall": 0.9970355731225297
211
+ },
212
+ {
213
+ "accuracy": 0.9930830039525692,
214
+ "f1": 0.9907773386034255,
215
+ "hf_subset": "eng-urd",
216
+ "languages": [
217
+ "eng-Latn",
218
+ "urd-Arab"
219
+ ],
220
+ "main_score": 0.9907773386034255,
221
+ "precision": 0.9896245059288538,
222
+ "recall": 0.9930830039525692
223
+ },
224
+ {
225
+ "accuracy": 0.974308300395257,
226
+ "f1": 0.9664031620553359,
227
+ "hf_subset": "asm-eng",
228
+ "languages": [
229
+ "asm-Beng",
230
+ "eng-Latn"
231
+ ],
232
+ "main_score": 0.9664031620553359,
233
+ "precision": 0.9626152832674572,
234
+ "recall": 0.974308300395257
235
+ },
236
+ {
237
+ "accuracy": 0.9644268774703557,
238
+ "f1": 0.9540513833992095,
239
+ "hf_subset": "eng-asm",
240
+ "languages": [
241
+ "eng-Latn",
242
+ "asm-Beng"
243
+ ],
244
+ "main_score": 0.9540513833992095,
245
+ "precision": 0.9492753623188406,
246
+ "recall": 0.9644268774703557
247
+ },
248
+ {
249
+ "accuracy": 0.9486166007905138,
250
+ "f1": 0.9331357048748352,
251
+ "hf_subset": "bho-eng",
252
+ "languages": [
253
+ "bho-Deva",
254
+ "eng-Latn"
255
+ ],
256
+ "main_score": 0.9331357048748352,
257
+ "precision": 0.9260540184453229,
258
+ "recall": 0.9486166007905138
259
+ },
260
+ {
261
+ "accuracy": 0.9357707509881423,
262
+ "f1": 0.9161396574440053,
263
+ "hf_subset": "eng-bho",
264
+ "languages": [
265
+ "eng-Latn",
266
+ "bho-Deva"
267
+ ],
268
+ "main_score": 0.9161396574440053,
269
+ "precision": 0.9071146245059288,
270
+ "recall": 0.9357707509881423
271
+ },
272
+ {
273
+ "accuracy": 0.9802371541501976,
274
+ "f1": 0.974308300395257,
275
+ "hf_subset": "nep-eng",
276
+ "languages": [
277
+ "nep-Deva",
278
+ "eng-Latn"
279
+ ],
280
+ "main_score": 0.974308300395257,
281
+ "precision": 0.9715085638998682,
282
+ "recall": 0.9802371541501976
283
+ },
284
+ {
285
+ "accuracy": 0.974308300395257,
286
+ "f1": 0.9667325428194993,
287
+ "hf_subset": "eng-nep",
288
+ "languages": [
289
+ "eng-Latn",
290
+ "nep-Deva"
291
+ ],
292
+ "main_score": 0.9667325428194993,
293
+ "precision": 0.9631093544137022,
294
+ "recall": 0.974308300395257
295
+ },
296
+ {
297
+ "accuracy": 0.9733201581027668,
298
+ "f1": 0.965250329380764,
299
+ "hf_subset": "ory-eng",
300
+ "languages": [
301
+ "ory-Orya",
302
+ "eng-Latn"
303
+ ],
304
+ "main_score": 0.965250329380764,
305
+ "precision": 0.9614624505928854,
306
+ "recall": 0.9733201581027668
307
+ },
308
+ {
309
+ "accuracy": 0.9515810276679841,
310
+ "f1": 0.9375823451910409,
311
+ "hf_subset": "eng-ory",
312
+ "languages": [
313
+ "eng-Latn",
314
+ "ory-Orya"
315
+ ],
316
+ "main_score": 0.9375823451910409,
317
+ "precision": 0.9310276679841898,
318
+ "recall": 0.9515810276679841
319
+ },
320
+ {
321
+ "accuracy": 0.9851778656126482,
322
+ "f1": 0.9804018445322793,
323
+ "hf_subset": "pan-eng",
324
+ "languages": [
325
+ "pan-Guru",
326
+ "eng-Latn"
327
+ ],
328
+ "main_score": 0.9804018445322793,
329
+ "precision": 0.9780961791831357,
330
+ "recall": 0.9851778656126482
331
+ },
332
+ {
333
+ "accuracy": 0.9772727272727273,
334
+ "f1": 0.9698616600790514,
335
+ "hf_subset": "eng-pan",
336
+ "languages": [
337
+ "eng-Latn",
338
+ "pan-Guru"
339
+ ],
340
+ "main_score": 0.9698616600790514,
341
+ "precision": 0.9662384716732543,
342
+ "recall": 0.9772727272727273
343
+ },
344
+ {
345
+ "accuracy": 0.9733201581027668,
346
+ "f1": 0.9649209486166008,
347
+ "hf_subset": "pus-eng",
348
+ "languages": [
349
+ "pus-Arab",
350
+ "eng-Latn"
351
+ ],
352
+ "main_score": 0.9649209486166008,
353
+ "precision": 0.9609683794466403,
354
+ "recall": 0.9733201581027668
355
+ },
356
+ {
357
+ "accuracy": 0.967391304347826,
358
+ "f1": 0.9570158102766798,
359
+ "hf_subset": "eng-pus",
360
+ "languages": [
361
+ "eng-Latn",
362
+ "pus-Arab"
363
+ ],
364
+ "main_score": 0.9570158102766798,
365
+ "precision": 0.9520750988142292,
366
+ "recall": 0.967391304347826
367
+ },
368
+ {
369
+ "accuracy": 0.9565217391304348,
370
+ "f1": 0.9430171277997365,
371
+ "hf_subset": "san-eng",
372
+ "languages": [
373
+ "san-Deva",
374
+ "eng-Latn"
375
+ ],
376
+ "main_score": 0.9430171277997365,
377
+ "precision": 0.9367588932806324,
378
+ "recall": 0.9565217391304348
379
+ },
380
+ {
381
+ "accuracy": 0.9347826086956522,
382
+ "f1": 0.9169489930359496,
383
+ "hf_subset": "eng-san",
384
+ "languages": [
385
+ "eng-Latn",
386
+ "san-Deva"
387
+ ],
388
+ "main_score": 0.9169489930359496,
389
+ "precision": 0.9089920948616601,
390
+ "recall": 0.9347826086956522
391
+ },
392
+ {
393
+ "accuracy": 0.9752964426877471,
394
+ "f1": 0.9679512516469038,
395
+ "hf_subset": "awa-eng",
396
+ "languages": [
397
+ "awa-Deva",
398
+ "eng-Latn"
399
+ ],
400
+ "main_score": 0.9679512516469038,
401
+ "precision": 0.9645092226613966,
402
+ "recall": 0.9752964426877471
403
+ },
404
+ {
405
+ "accuracy": 0.9624505928853755,
406
+ "f1": 0.9517457180500659,
407
+ "hf_subset": "eng-awa",
408
+ "languages": [
409
+ "eng-Latn",
410
+ "awa-Deva"
411
+ ],
412
+ "main_score": 0.9517457180500659,
413
+ "precision": 0.9468050065876152,
414
+ "recall": 0.9624505928853755
415
+ },
416
+ {
417
+ "accuracy": 0.983201581027668,
418
+ "f1": 0.9777667984189723,
419
+ "hf_subset": "bgc-eng",
420
+ "languages": [
421
+ "bgc-Deva",
422
+ "eng-Latn"
423
+ ],
424
+ "main_score": 0.9777667984189723,
425
+ "precision": 0.9751317523056653,
426
+ "recall": 0.983201581027668
427
+ },
428
+ {
429
+ "accuracy": 0.9664031620553359,
430
+ "f1": 0.9555335968379447,
431
+ "hf_subset": "eng-bgc",
432
+ "languages": [
433
+ "eng-Latn",
434
+ "bgc-Deva"
435
+ ],
436
+ "main_score": 0.9555335968379447,
437
+ "precision": 0.9502635046113307,
438
+ "recall": 0.9664031620553359
439
+ },
440
+ {
441
+ "accuracy": 0.12845849802371542,
442
+ "f1": 0.10925277014281166,
443
+ "hf_subset": "bod-eng",
444
+ "languages": [
445
+ "bod-Tibt",
446
+ "eng-Latn"
447
+ ],
448
+ "main_score": 0.10925277014281166,
449
+ "precision": 0.10334746670633169,
450
+ "recall": 0.12845849802371542
451
+ },
452
+ {
453
+ "accuracy": 0.18280632411067194,
454
+ "f1": 0.11928418508834324,
455
+ "hf_subset": "eng-bod",
456
+ "languages": [
457
+ "eng-Latn",
458
+ "bod-Tibt"
459
+ ],
460
+ "main_score": 0.11928418508834324,
461
+ "precision": 0.10367904082083926,
462
+ "recall": 0.18280632411067194
463
+ },
464
+ {
465
+ "accuracy": 0.39723320158102765,
466
+ "f1": 0.3420402198811289,
467
+ "hf_subset": "boy-eng",
468
+ "languages": [
469
+ "boy-Deva",
470
+ "eng-Latn"
471
+ ],
472
+ "main_score": 0.3420402198811289,
473
+ "precision": 0.3248051195373966,
474
+ "recall": 0.39723320158102765
475
+ },
476
+ {
477
+ "accuracy": 0.40810276679841895,
478
+ "f1": 0.32751748142064346,
479
+ "hf_subset": "eng-boy",
480
+ "languages": [
481
+ "eng-Latn",
482
+ "boy-Deva"
483
+ ],
484
+ "main_score": 0.32751748142064346,
485
+ "precision": 0.3016884757947883,
486
+ "recall": 0.40810276679841895
487
+ },
488
+ {
489
+ "accuracy": 0.9772727272727273,
490
+ "f1": 0.9703557312252964,
491
+ "hf_subset": "gbm-eng",
492
+ "languages": [
493
+ "gbm-Deva",
494
+ "eng-Latn"
495
+ ],
496
+ "main_score": 0.9703557312252964,
497
+ "precision": 0.9670619235836628,
498
+ "recall": 0.9772727272727273
499
+ },
500
+ {
501
+ "accuracy": 0.9644268774703557,
502
+ "f1": 0.953227931488801,
503
+ "hf_subset": "eng-gbm",
504
+ "languages": [
505
+ "eng-Latn",
506
+ "gbm-Deva"
507
+ ],
508
+ "main_score": 0.953227931488801,
509
+ "precision": 0.9479578392621871,
510
+ "recall": 0.9644268774703557
511
+ },
512
+ {
513
+ "accuracy": 0.8932806324110671,
514
+ "f1": 0.8636034255599473,
515
+ "hf_subset": "gom-eng",
516
+ "languages": [
517
+ "gom-Deva",
518
+ "eng-Latn"
519
+ ],
520
+ "main_score": 0.8636034255599473,
521
+ "precision": 0.8503293807641633,
522
+ "recall": 0.8932806324110671
523
+ },
524
+ {
525
+ "accuracy": 0.8764822134387352,
526
+ "f1": 0.8405467720685111,
527
+ "hf_subset": "eng-gom",
528
+ "languages": [
529
+ "eng-Latn",
530
+ "gom-Deva"
531
+ ],
532
+ "main_score": 0.8405467720685111,
533
+ "precision": 0.8241436100131752,
534
+ "recall": 0.8764822134387352
535
+ },
536
+ {
537
+ "accuracy": 0.9723320158102767,
538
+ "f1": 0.9640645586297759,
539
+ "hf_subset": "hne-eng",
540
+ "languages": [
541
+ "hne-Deva",
542
+ "eng-Latn"
543
+ ],
544
+ "main_score": 0.9640645586297759,
545
+ "precision": 0.9603096179183135,
546
+ "recall": 0.9723320158102767
547
+ },
548
+ {
549
+ "accuracy": 0.9486166007905138,
550
+ "f1": 0.9334180312441183,
551
+ "hf_subset": "eng-hne",
552
+ "languages": [
553
+ "eng-Latn",
554
+ "hne-Deva"
555
+ ],
556
+ "main_score": 0.9334180312441183,
557
+ "precision": 0.926548089591568,
558
+ "recall": 0.9486166007905138
559
+ },
560
+ {
561
+ "accuracy": 0.983201581027668,
562
+ "f1": 0.9782608695652174,
563
+ "hf_subset": "raj-eng",
564
+ "languages": [
565
+ "raj-Deva",
566
+ "eng-Latn"
567
+ ],
568
+ "main_score": 0.9782608695652174,
569
+ "precision": 0.9759552042160737,
570
+ "recall": 0.983201581027668
571
+ },
572
+ {
573
+ "accuracy": 0.9693675889328063,
574
+ "f1": 0.9598155467720685,
575
+ "hf_subset": "eng-raj",
576
+ "languages": [
577
+ "eng-Latn",
578
+ "raj-Deva"
579
+ ],
580
+ "main_score": 0.9598155467720685,
581
+ "precision": 0.9552371541501977,
582
+ "recall": 0.9693675889328063
583
+ },
584
+ {
585
+ "accuracy": 0.908102766798419,
586
+ "f1": 0.8837435621032459,
587
+ "hf_subset": "mai-eng",
588
+ "languages": [
589
+ "mai-Deva",
590
+ "eng-Latn"
591
+ ],
592
+ "main_score": 0.8837435621032459,
593
+ "precision": 0.8729578392621872,
594
+ "recall": 0.908102766798419
595
+ },
596
+ {
597
+ "accuracy": 0.8656126482213439,
598
+ "f1": 0.8283126293995859,
599
+ "hf_subset": "eng-mai",
600
+ "languages": [
601
+ "eng-Latn",
602
+ "mai-Deva"
603
+ ],
604
+ "main_score": 0.8283126293995859,
605
+ "precision": 0.8123847167325429,
606
+ "recall": 0.8656126482213439
607
+ },
608
+ {
609
+ "accuracy": 0.7924901185770751,
610
+ "f1": 0.7397123407992974,
611
+ "hf_subset": "mni-eng",
612
+ "languages": [
613
+ "mni-Mtei",
614
+ "eng-Latn"
615
+ ],
616
+ "main_score": 0.7397123407992974,
617
+ "precision": 0.7174489459815546,
618
+ "recall": 0.7924901185770751
619
+ },
620
+ {
621
+ "accuracy": 0.8013833992094862,
622
+ "f1": 0.7486824769433466,
623
+ "hf_subset": "eng-mni",
624
+ "languages": [
625
+ "eng-Latn",
626
+ "mni-Mtei"
627
+ ],
628
+ "main_score": 0.7486824769433466,
629
+ "precision": 0.7254117259552042,
630
+ "recall": 0.8013833992094862
631
+ },
632
+ {
633
+ "accuracy": 0.9881422924901185,
634
+ "f1": 0.9841897233201581,
635
+ "hf_subset": "mup-eng",
636
+ "languages": [
637
+ "mup-Deva",
638
+ "eng-Latn"
639
+ ],
640
+ "main_score": 0.9841897233201581,
641
+ "precision": 0.9822134387351779,
642
+ "recall": 0.9881422924901185
643
+ },
644
+ {
645
+ "accuracy": 0.9703557312252964,
646
+ "f1": 0.961627140974967,
647
+ "hf_subset": "eng-mup",
648
+ "languages": [
649
+ "eng-Latn",
650
+ "mup-Deva"
651
+ ],
652
+ "main_score": 0.961627140974967,
653
+ "precision": 0.9575428194993412,
654
+ "recall": 0.9703557312252964
655
+ },
656
+ {
657
+ "accuracy": 0.9861660079051383,
658
+ "f1": 0.9823781291172595,
659
+ "hf_subset": "mwr-eng",
660
+ "languages": [
661
+ "mwr-Deva",
662
+ "eng-Latn"
663
+ ],
664
+ "main_score": 0.9823781291172595,
665
+ "precision": 0.980566534914361,
666
+ "recall": 0.9861660079051383
667
+ },
668
+ {
669
+ "accuracy": 0.9713438735177866,
670
+ "f1": 0.9629446640316206,
671
+ "hf_subset": "eng-mwr",
672
+ "languages": [
673
+ "eng-Latn",
674
+ "mwr-Deva"
675
+ ],
676
+ "main_score": 0.9629446640316206,
677
+ "precision": 0.9590250329380764,
678
+ "recall": 0.9713438735177866
679
+ },
680
+ {
681
+ "accuracy": 0.03557312252964427,
682
+ "f1": 0.02496892391680052,
683
+ "hf_subset": "sat-eng",
684
+ "languages": [
685
+ "sat-Olck",
686
+ "eng-Latn"
687
+ ],
688
+ "main_score": 0.02496892391680052,
689
+ "precision": 0.023038056245050193,
690
+ "recall": 0.03557312252964427
691
+ },
692
+ {
693
+ "accuracy": 0.037549407114624504,
694
+ "f1": 0.01564837413820162,
695
+ "hf_subset": "eng-sat",
696
+ "languages": [
697
+ "eng-Latn",
698
+ "sat-Olck"
699
+ ],
700
+ "main_score": 0.01564837413820162,
701
+ "precision": 0.011935062476894331,
702
+ "recall": 0.037549407114624504
703
+ }
704
+ ],
705
+ "validation": [
706
+ {
707
+ "accuracy": 0.9889669007021064,
708
+ "f1": 0.9852892009361418,
709
+ "hf_subset": "ben-eng",
710
+ "languages": [
711
+ "ben-Beng",
712
+ "eng-Latn"
713
+ ],
714
+ "main_score": 0.9852892009361418,
715
+ "precision": 0.9834503510531595,
716
+ "recall": 0.9889669007021064
717
+ },
718
+ {
719
+ "accuracy": 0.9779338014042126,
720
+ "f1": 0.970912738214644,
721
+ "hf_subset": "eng-ben",
722
+ "languages": [
723
+ "eng-Latn",
724
+ "ben-Beng"
725
+ ],
726
+ "main_score": 0.970912738214644,
727
+ "precision": 0.9675693747910399,
728
+ "recall": 0.9779338014042126
729
+ },
730
+ {
731
+ "accuracy": 0.9969909729187563,
732
+ "f1": 0.995987963891675,
733
+ "hf_subset": "guj-eng",
734
+ "languages": [
735
+ "guj-Gujr",
736
+ "eng-Latn"
737
+ ],
738
+ "main_score": 0.995987963891675,
739
+ "precision": 0.9954864593781344,
740
+ "recall": 0.9969909729187563
741
+ },
742
+ {
743
+ "accuracy": 0.9829488465396189,
744
+ "f1": 0.9772651287194918,
745
+ "hf_subset": "eng-guj",
746
+ "languages": [
747
+ "eng-Latn",
748
+ "guj-Gujr"
749
+ ],
750
+ "main_score": 0.9772651287194918,
751
+ "precision": 0.9744232698094283,
752
+ "recall": 0.9829488465396189
753
+ },
754
+ {
755
+ "accuracy": 0.9979939819458375,
756
+ "f1": 0.9973253092611166,
757
+ "hf_subset": "hin-eng",
758
+ "languages": [
759
+ "hin-Deva",
760
+ "eng-Latn"
761
+ ],
762
+ "main_score": 0.9973253092611166,
763
+ "precision": 0.9969909729187563,
764
+ "recall": 0.9979939819458375
765
+ },
766
+ {
767
+ "accuracy": 0.995987963891675,
768
+ "f1": 0.9946506185222334,
769
+ "hf_subset": "eng-hin",
770
+ "languages": [
771
+ "eng-Latn",
772
+ "hin-Deva"
773
+ ],
774
+ "main_score": 0.9946506185222334,
775
+ "precision": 0.9939819458375125,
776
+ "recall": 0.995987963891675
777
+ },
778
+ {
779
+ "accuracy": 0.9859578736208626,
780
+ "f1": 0.9812771648278167,
781
+ "hf_subset": "kan-eng",
782
+ "languages": [
783
+ "kan-Knda",
784
+ "eng-Latn"
785
+ ],
786
+ "main_score": 0.9812771648278167,
787
+ "precision": 0.9789368104312939,
788
+ "recall": 0.9859578736208626
789
+ },
790
+ {
791
+ "accuracy": 0.9809428284854563,
792
+ "f1": 0.9747576061517886,
793
+ "hf_subset": "eng-kan",
794
+ "languages": [
795
+ "eng-Latn",
796
+ "kan-Knda"
797
+ ],
798
+ "main_score": 0.9747576061517886,
799
+ "precision": 0.971748579070545,
800
+ "recall": 0.9809428284854563
801
+ },
802
+ {
803
+ "accuracy": 0.9749247743229689,
804
+ "f1": 0.9675693747910398,
805
+ "hf_subset": "mal-eng",
806
+ "languages": [
807
+ "mal-Mlym",
808
+ "eng-Latn"
809
+ ],
810
+ "main_score": 0.9675693747910398,
811
+ "precision": 0.9640588431962555,
812
+ "recall": 0.9749247743229689
813
+ },
814
+ {
815
+ "accuracy": 0.958876629889669,
816
+ "f1": 0.9456703443664326,
817
+ "hf_subset": "eng-mal",
818
+ "languages": [
819
+ "eng-Latn",
820
+ "mal-Mlym"
821
+ ],
822
+ "main_score": 0.9456703443664326,
823
+ "precision": 0.9393179538615848,
824
+ "recall": 0.958876629889669
825
+ },
826
+ {
827
+ "accuracy": 0.9979939819458375,
828
+ "f1": 0.9973253092611166,
829
+ "hf_subset": "mar-eng",
830
+ "languages": [
831
+ "mar-Deva",
832
+ "eng-Latn"
833
+ ],
834
+ "main_score": 0.9973253092611166,
835
+ "precision": 0.9969909729187563,
836
+ "recall": 0.9979939819458375
837
+ },
838
+ {
839
+ "accuracy": 0.9889669007021064,
840
+ "f1": 0.9854563691073219,
841
+ "hf_subset": "eng-mar",
842
+ "languages": [
843
+ "eng-Latn",
844
+ "mar-Deva"
845
+ ],
846
+ "main_score": 0.9854563691073219,
847
+ "precision": 0.9837846873955199,
848
+ "recall": 0.9889669007021064
849
+ },
850
+ {
851
+ "accuracy": 0.9949849548645938,
852
+ "f1": 0.9933132731527916,
853
+ "hf_subset": "tam-eng",
854
+ "languages": [
855
+ "tam-Taml",
856
+ "eng-Latn"
857
+ ],
858
+ "main_score": 0.9933132731527916,
859
+ "precision": 0.9924774322968907,
860
+ "recall": 0.9949849548645938
861
+ },
862
+ {
863
+ "accuracy": 0.9859578736208626,
864
+ "f1": 0.981444332998997,
865
+ "hf_subset": "eng-tam",
866
+ "languages": [
867
+ "eng-Latn",
868
+ "tam-Taml"
869
+ ],
870
+ "main_score": 0.981444332998997,
871
+ "precision": 0.9792711467736543,
872
+ "recall": 0.9859578736208626
873
+ },
874
+ {
875
+ "accuracy": 0.9919759277833501,
876
+ "f1": 0.9893012370444668,
877
+ "hf_subset": "tel-eng",
878
+ "languages": [
879
+ "tel-Telu",
880
+ "eng-Latn"
881
+ ],
882
+ "main_score": 0.9893012370444668,
883
+ "precision": 0.9879638916750251,
884
+ "recall": 0.9919759277833501
885
+ },
886
+ {
887
+ "accuracy": 0.9859578736208626,
888
+ "f1": 0.9812771648278167,
889
+ "hf_subset": "eng-tel",
890
+ "languages": [
891
+ "eng-Latn",
892
+ "tel-Telu"
893
+ ],
894
+ "main_score": 0.9812771648278167,
895
+ "precision": 0.9789368104312939,
896
+ "recall": 0.9859578736208626
897
+ },
898
+ {
899
+ "accuracy": 0.995987963891675,
900
+ "f1": 0.9946506185222334,
901
+ "hf_subset": "urd-eng",
902
+ "languages": [
903
+ "urd-Arab",
904
+ "eng-Latn"
905
+ ],
906
+ "main_score": 0.9946506185222334,
907
+ "precision": 0.9939819458375125,
908
+ "recall": 0.995987963891675
909
+ },
910
+ {
911
+ "accuracy": 0.9889669007021064,
912
+ "f1": 0.9852892009361417,
913
+ "hf_subset": "eng-urd",
914
+ "languages": [
915
+ "eng-Latn",
916
+ "urd-Arab"
917
+ ],
918
+ "main_score": 0.9852892009361417,
919
+ "precision": 0.9834503510531595,
920
+ "recall": 0.9889669007021064
921
+ },
922
+ {
923
+ "accuracy": 0.970912738214644,
924
+ "f1": 0.9617184887997325,
925
+ "hf_subset": "asm-eng",
926
+ "languages": [
927
+ "asm-Beng",
928
+ "eng-Latn"
929
+ ],
930
+ "main_score": 0.9617184887997325,
931
+ "precision": 0.9573721163490472,
932
+ "recall": 0.970912738214644
933
+ },
934
+ {
935
+ "accuracy": 0.9578736208625878,
936
+ "f1": 0.945068538950184,
937
+ "hf_subset": "eng-asm",
938
+ "languages": [
939
+ "eng-Latn",
940
+ "asm-Beng"
941
+ ],
942
+ "main_score": 0.945068538950184,
943
+ "precision": 0.9390672016048145,
944
+ "recall": 0.9578736208625878
945
+ },
946
+ {
947
+ "accuracy": 0.9488465396188566,
948
+ "f1": 0.933032430625209,
949
+ "hf_subset": "bho-eng",
950
+ "languages": [
951
+ "bho-Deva",
952
+ "eng-Latn"
953
+ ],
954
+ "main_score": 0.933032430625209,
955
+ "precision": 0.9255265797392177,
956
+ "recall": 0.9488465396188566
957
+ },
958
+ {
959
+ "accuracy": 0.9107321965897693,
960
+ "f1": 0.8852223336676697,
961
+ "hf_subset": "eng-bho",
962
+ "languages": [
963
+ "eng-Latn",
964
+ "bho-Deva"
965
+ ],
966
+ "main_score": 0.8852223336676697,
967
+ "precision": 0.8736041457706453,
968
+ "recall": 0.9107321965897693
969
+ },
970
+ {
971
+ "accuracy": 0.9809428284854563,
972
+ "f1": 0.9750919424941491,
973
+ "hf_subset": "nep-eng",
974
+ "languages": [
975
+ "nep-Deva",
976
+ "eng-Latn"
977
+ ],
978
+ "main_score": 0.9750919424941491,
979
+ "precision": 0.9722500835840855,
980
+ "recall": 0.9809428284854563
981
+ },
982
+ {
983
+ "accuracy": 0.9689067201604814,
984
+ "f1": 0.9590437980608493,
985
+ "hf_subset": "eng-nep",
986
+ "languages": [
987
+ "eng-Latn",
988
+ "nep-Deva"
989
+ ],
990
+ "main_score": 0.9590437980608493,
991
+ "precision": 0.9543630892678034,
992
+ "recall": 0.9689067201604814
993
+ },
994
+ {
995
+ "accuracy": 0.9739217652958877,
996
+ "f1": 0.9659645603477097,
997
+ "hf_subset": "ory-eng",
998
+ "languages": [
999
+ "ory-Orya",
1000
+ "eng-Latn"
1001
+ ],
1002
+ "main_score": 0.9659645603477097,
1003
+ "precision": 0.9621364092276831,
1004
+ "recall": 0.9739217652958877
1005
+ },
1006
+ {
1007
+ "accuracy": 0.9458375125376128,
1008
+ "f1": 0.9286191909060514,
1009
+ "hf_subset": "eng-ory",
1010
+ "languages": [
1011
+ "eng-Latn",
1012
+ "ory-Orya"
1013
+ ],
1014
+ "main_score": 0.9286191909060514,
1015
+ "precision": 0.9202607823470411,
1016
+ "recall": 0.9458375125376128
1017
+ },
1018
+ {
1019
+ "accuracy": 0.9819458375125376,
1020
+ "f1": 0.9762621196924105,
1021
+ "hf_subset": "pan-eng",
1022
+ "languages": [
1023
+ "pan-Guru",
1024
+ "eng-Latn"
1025
+ ],
1026
+ "main_score": 0.9762621196924105,
1027
+ "precision": 0.973420260782347,
1028
+ "recall": 0.9819458375125376
1029
+ },
1030
+ {
1031
+ "accuracy": 0.9648946840521565,
1032
+ "f1": 0.9536944165830825,
1033
+ "hf_subset": "eng-pan",
1034
+ "languages": [
1035
+ "eng-Latn",
1036
+ "pan-Guru"
1037
+ ],
1038
+ "main_score": 0.9536944165830825,
1039
+ "precision": 0.948345035105316,
1040
+ "recall": 0.9648946840521565
1041
+ },
1042
+ {
1043
+ "accuracy": 0.9739217652958877,
1044
+ "f1": 0.965229020394517,
1045
+ "hf_subset": "pus-eng",
1046
+ "languages": [
1047
+ "pus-Arab",
1048
+ "eng-Latn"
1049
+ ],
1050
+ "main_score": 0.965229020394517,
1051
+ "precision": 0.9608826479438315,
1052
+ "recall": 0.9739217652958877
1053
+ },
1054
+ {
1055
+ "accuracy": 0.9568706118355065,
1056
+ "f1": 0.9430625208960213,
1057
+ "hf_subset": "eng-pus",
1058
+ "languages": [
1059
+ "eng-Latn",
1060
+ "pus-Arab"
1061
+ ],
1062
+ "main_score": 0.9430625208960213,
1063
+ "precision": 0.936392510865931,
1064
+ "recall": 0.9568706118355065
1065
+ },
1066
+ {
1067
+ "accuracy": 0.9358074222668004,
1068
+ "f1": 0.9154129053828151,
1069
+ "hf_subset": "san-eng",
1070
+ "languages": [
1071
+ "san-Deva",
1072
+ "eng-Latn"
1073
+ ],
1074
+ "main_score": 0.9154129053828151,
1075
+ "precision": 0.9055499832831828,
1076
+ "recall": 0.9358074222668004
1077
+ },
1078
+ {
1079
+ "accuracy": 0.9307923771313942,
1080
+ "f1": 0.9092276830491475,
1081
+ "hf_subset": "eng-san",
1082
+ "languages": [
1083
+ "eng-Latn",
1084
+ "san-Deva"
1085
+ ],
1086
+ "main_score": 0.9092276830491475,
1087
+ "precision": 0.8990304246071548,
1088
+ "recall": 0.9307923771313942
1089
+ },
1090
+ {
1091
+ "accuracy": 0.9739217652958877,
1092
+ "f1": 0.9671347375459712,
1093
+ "hf_subset": "awa-eng",
1094
+ "languages": [
1095
+ "awa-Deva",
1096
+ "eng-Latn"
1097
+ ],
1098
+ "main_score": 0.9671347375459712,
1099
+ "precision": 0.9639752591106653,
1100
+ "recall": 0.9739217652958877
1101
+ },
1102
+ {
1103
+ "accuracy": 0.9568706118355065,
1104
+ "f1": 0.9442995653627548,
1105
+ "hf_subset": "eng-awa",
1106
+ "languages": [
1107
+ "eng-Latn",
1108
+ "awa-Deva"
1109
+ ],
1110
+ "main_score": 0.9442995653627548,
1111
+ "precision": 0.9384821130056836,
1112
+ "recall": 0.9568706118355065
1113
+ },
1114
+ {
1115
+ "accuracy": 0.9859578736208626,
1116
+ "f1": 0.9812771648278167,
1117
+ "hf_subset": "bgc-eng",
1118
+ "languages": [
1119
+ "bgc-Deva",
1120
+ "eng-Latn"
1121
+ ],
1122
+ "main_score": 0.9812771648278167,
1123
+ "precision": 0.9789368104312939,
1124
+ "recall": 0.9859578736208626
1125
+ },
1126
+ {
1127
+ "accuracy": 0.970912738214644,
1128
+ "f1": 0.9619525242393847,
1129
+ "hf_subset": "eng-bgc",
1130
+ "languages": [
1131
+ "eng-Latn",
1132
+ "bgc-Deva"
1133
+ ],
1134
+ "main_score": 0.9619525242393847,
1135
+ "precision": 0.9577900367769976,
1136
+ "recall": 0.970912738214644
1137
+ },
1138
+ {
1139
+ "accuracy": 0.1444332998996991,
1140
+ "f1": 0.12321183258426603,
1141
+ "hf_subset": "bod-eng",
1142
+ "languages": [
1143
+ "bod-Tibt",
1144
+ "eng-Latn"
1145
+ ],
1146
+ "main_score": 0.12321183258426603,
1147
+ "precision": 0.11695629088573045,
1148
+ "recall": 0.1444332998996991
1149
+ },
1150
+ {
1151
+ "accuracy": 0.18254764292878636,
1152
+ "f1": 0.1173866510726937,
1153
+ "hf_subset": "eng-bod",
1154
+ "languages": [
1155
+ "eng-Latn",
1156
+ "bod-Tibt"
1157
+ ],
1158
+ "main_score": 0.1173866510726937,
1159
+ "precision": 0.0997695791533186,
1160
+ "recall": 0.18254764292878636
1161
+ },
1162
+ {
1163
+ "accuracy": 0.40421263791374124,
1164
+ "f1": 0.33984380992906565,
1165
+ "hf_subset": "boy-eng",
1166
+ "languages": [
1167
+ "boy-Deva",
1168
+ "eng-Latn"
1169
+ ],
1170
+ "main_score": 0.33984380992906565,
1171
+ "precision": 0.3193505480366062,
1172
+ "recall": 0.40421263791374124
1173
+ },
1174
+ {
1175
+ "accuracy": 0.4172517552657974,
1176
+ "f1": 0.33611330696968417,
1177
+ "hf_subset": "eng-boy",
1178
+ "languages": [
1179
+ "eng-Latn",
1180
+ "boy-Deva"
1181
+ ],
1182
+ "main_score": 0.33611330696968417,
1183
+ "precision": 0.3089093296263721,
1184
+ "recall": 0.4172517552657974
1185
+ },
1186
+ {
1187
+ "accuracy": 0.9799398194583752,
1188
+ "f1": 0.9739217652958877,
1189
+ "hf_subset": "gbm-eng",
1190
+ "languages": [
1191
+ "gbm-Deva",
1192
+ "eng-Latn"
1193
+ ],
1194
+ "main_score": 0.9739217652958877,
1195
+ "precision": 0.9710799063858242,
1196
+ "recall": 0.9799398194583752
1197
+ },
1198
+ {
1199
+ "accuracy": 0.9538615847542627,
1200
+ "f1": 0.9391507856904044,
1201
+ "hf_subset": "eng-gbm",
1202
+ "languages": [
1203
+ "eng-Latn",
1204
+ "gbm-Deva"
1205
+ ],
1206
+ "main_score": 0.9391507856904044,
1207
+ "precision": 0.9321297225008358,
1208
+ "recall": 0.9538615847542627
1209
+ },
1210
+ {
1211
+ "accuracy": 0.8916750250752257,
1212
+ "f1": 0.861250417920428,
1213
+ "hf_subset": "gom-eng",
1214
+ "languages": [
1215
+ "gom-Deva",
1216
+ "eng-Latn"
1217
+ ],
1218
+ "main_score": 0.861250417920428,
1219
+ "precision": 0.8470745570043463,
1220
+ "recall": 0.8916750250752257
1221
+ },
1222
+ {
1223
+ "accuracy": 0.8645937813440321,
1224
+ "f1": 0.8243396857238381,
1225
+ "hf_subset": "eng-gom",
1226
+ "languages": [
1227
+ "eng-Latn",
1228
+ "gom-Deva"
1229
+ ],
1230
+ "main_score": 0.8243396857238381,
1231
+ "precision": 0.8061685055165496,
1232
+ "recall": 0.8645937813440321
1233
+ },
1234
+ {
1235
+ "accuracy": 0.954864593781344,
1236
+ "f1": 0.9407221664994984,
1237
+ "hf_subset": "hne-eng",
1238
+ "languages": [
1239
+ "hne-Deva",
1240
+ "eng-Latn"
1241
+ ],
1242
+ "main_score": 0.9407221664994984,
1243
+ "precision": 0.9340521564694082,
1244
+ "recall": 0.954864593781344
1245
+ },
1246
+ {
1247
+ "accuracy": 0.9348044132397192,
1248
+ "f1": 0.9148779672350383,
1249
+ "hf_subset": "eng-hne",
1250
+ "languages": [
1251
+ "eng-Latn",
1252
+ "hne-Deva"
1253
+ ],
1254
+ "main_score": 0.9148779672350383,
1255
+ "precision": 0.9055499832831829,
1256
+ "recall": 0.9348044132397192
1257
+ },
1258
+ {
1259
+ "accuracy": 0.9719157472417251,
1260
+ "f1": 0.9627214978268138,
1261
+ "hf_subset": "raj-eng",
1262
+ "languages": [
1263
+ "raj-Deva",
1264
+ "eng-Latn"
1265
+ ],
1266
+ "main_score": 0.9627214978268138,
1267
+ "precision": 0.9582079572049482,
1268
+ "recall": 0.9719157472417251
1269
+ },
1270
+ {
1271
+ "accuracy": 0.958876629889669,
1272
+ "f1": 0.9453360080240722,
1273
+ "hf_subset": "eng-raj",
1274
+ "languages": [
1275
+ "eng-Latn",
1276
+ "raj-Deva"
1277
+ ],
1278
+ "main_score": 0.9453360080240722,
1279
+ "precision": 0.938649281176864,
1280
+ "recall": 0.958876629889669
1281
+ },
1282
+ {
1283
+ "accuracy": 0.8906720160481444,
1284
+ "f1": 0.8590963366289344,
1285
+ "hf_subset": "mai-eng",
1286
+ "languages": [
1287
+ "mai-Deva",
1288
+ "eng-Latn"
1289
+ ],
1290
+ "main_score": 0.8590963366289344,
1291
+ "precision": 0.8450183884988298,
1292
+ "recall": 0.8906720160481444
1293
+ },
1294
+ {
1295
+ "accuracy": 0.8425275827482447,
1296
+ "f1": 0.7976787505373262,
1297
+ "hf_subset": "eng-mai",
1298
+ "languages": [
1299
+ "eng-Latn",
1300
+ "mai-Deva"
1301
+ ],
1302
+ "main_score": 0.7976787505373262,
1303
+ "precision": 0.7781176863925109,
1304
+ "recall": 0.8425275827482447
1305
+ },
1306
+ {
1307
+ "accuracy": 0.7773319959879639,
1308
+ "f1": 0.7202154081291493,
1309
+ "hf_subset": "mni-eng",
1310
+ "languages": [
1311
+ "mni-Mtei",
1312
+ "eng-Latn"
1313
+ ],
1314
+ "main_score": 0.7202154081291493,
1315
+ "precision": 0.6969002244829726,
1316
+ "recall": 0.7773319959879639
1317
+ },
1318
+ {
1319
+ "accuracy": 0.7963891675025075,
1320
+ "f1": 0.7400534938147777,
1321
+ "hf_subset": "eng-mni",
1322
+ "languages": [
1323
+ "eng-Latn",
1324
+ "mni-Mtei"
1325
+ ],
1326
+ "main_score": 0.7400534938147777,
1327
+ "precision": 0.7156803744567035,
1328
+ "recall": 0.7963891675025075
1329
+ },
1330
+ {
1331
+ "accuracy": 0.9839518555667001,
1332
+ "f1": 0.9789368104312939,
1333
+ "hf_subset": "mup-eng",
1334
+ "languages": [
1335
+ "mup-Deva",
1336
+ "eng-Latn"
1337
+ ],
1338
+ "main_score": 0.9789368104312939,
1339
+ "precision": 0.9764292878635907,
1340
+ "recall": 0.9839518555667001
1341
+ },
1342
+ {
1343
+ "accuracy": 0.9689067201604814,
1344
+ "f1": 0.9587094617184888,
1345
+ "hf_subset": "eng-mup",
1346
+ "languages": [
1347
+ "eng-Latn",
1348
+ "mup-Deva"
1349
+ ],
1350
+ "main_score": 0.9587094617184888,
1351
+ "precision": 0.9536944165830825,
1352
+ "recall": 0.9689067201604814
1353
+ },
1354
+ {
1355
+ "accuracy": 0.9829488465396189,
1356
+ "f1": 0.9772651287194918,
1357
+ "hf_subset": "mwr-eng",
1358
+ "languages": [
1359
+ "mwr-Deva",
1360
+ "eng-Latn"
1361
+ ],
1362
+ "main_score": 0.9772651287194918,
1363
+ "precision": 0.9744232698094283,
1364
+ "recall": 0.9829488465396189
1365
+ },
1366
+ {
1367
+ "accuracy": 0.9699097291875627,
1368
+ "f1": 0.9602139752591107,
1369
+ "hf_subset": "eng-mwr",
1370
+ "languages": [
1371
+ "eng-Latn",
1372
+ "mwr-Deva"
1373
+ ],
1374
+ "main_score": 0.9602139752591107,
1375
+ "precision": 0.9555332664660648,
1376
+ "recall": 0.9699097291875627
1377
+ },
1378
+ {
1379
+ "accuracy": 0.02708124373119358,
1380
+ "f1": 0.018845325537793813,
1381
+ "hf_subset": "sat-eng",
1382
+ "languages": [
1383
+ "sat-Olck",
1384
+ "eng-Latn"
1385
+ ],
1386
+ "main_score": 0.018845325537793813,
1387
+ "precision": 0.01770861990725649,
1388
+ "recall": 0.02708124373119358
1389
+ },
1390
+ {
1391
+ "accuracy": 0.024072216649949848,
1392
+ "f1": 0.007311291077702955,
1393
+ "hf_subset": "eng-sat",
1394
+ "languages": [
1395
+ "eng-Latn",
1396
+ "sat-Olck"
1397
+ ],
1398
+ "main_score": 0.007311291077702955,
1399
+ "precision": 0.005266642593569026,
1400
+ "recall": 0.024072216649949848
1401
+ }
1402
+ ]
1403
+ },
1404
+ "task_name": "IndicGenBenchFloresBitextMining"
1405
+ }