Commit c1f9f66 (verified) · Parent: 7b3ea75

Update readme.md

Files changed (1):
  1. README.md  +183 -23
README.md CHANGED
@@ -62,46 +62,206 @@ model-index:
   results:
   - dataset:
       config: ar
-      name: MTEB MIRACLRetrievalHardNegatives (ar)
-      revision: 95c8db7d4a6e9c1d8a60601afd63d553ae20a2eb
-      split: dev
-      type: mteb/miracl-hard-negatives
+      name: MTEB MintakaRetrieval (ar)
+      revision: efa78cc2f74bbcd21eff2261f9e13aebe40b814e
+      split: test
+      type: mintaka/mmteb-mintaka
     metrics:
-    - type: main_score
-      value: 18.836
+    - type: main_score
+      value: 14.585
+    - type: map_at_1
+      value: 8.352
+    - type: map_at_3
+      value: 10.917
+    - type: map_at_5
+      value: 11.634
+    - type: map_at_10
+      value: 12.254
+    - type: ndcg_at_1
+      value: 8.352
+    - type: ndcg_at_3
+      value: 11.794
+    - type: ndcg_at_5
+      value: 13.085
+    - type: ndcg_at_10
+      value: 14.585
+    - type: recall_at_1
+      value: 8.352
+    - type: recall_at_3
+      value: 14.344
+    - type: recall_at_5
+      value: 17.476
+    - type: recall_at_10
+      value: 22.106
+    - type: precision_at_1
+      value: 8.352
+    - type: precision_at_3
+      value: 4.781
+    - type: precision_at_5
+      value: 3.495
+    - type: precision_at_10
+      value: 2.211
+    - type: mrr_at_1
+      value: 8.3522
+    - type: mrr_at_3
+      value: 10.9169
+    - type: mrr_at_5
+      value: 11.6341
+    - type: mrr_at_10
+      value: 12.2543
     task:
       type: Retrieval
   - dataset:
-      config: ara-ara
-      name: MTEB MLQARetrieval (ara-ara)
-      revision: 397ed406c1a7902140303e7faf60fff35b58d285
-      split: test
-      type: facebook/mlqa
+      config: ar
+      name: MTEB MIRACLRetrievalHardNegatives (ar)
+      revision: 95c8db7d4a6e9c1d8a60601afd63d553ae20a2eb
+      split: dev
+      type: miracl/mmteb-miracl-hardnegatives
     metrics:
-    - type: main_score
-      value: 61.582
+    - type: main_score
+      value: 18.836
+    - type: map_at_1
+      value: 6.646
+    - type: map_at_3
+      value: 10.692
+    - type: map_at_5
+      value: 11.969
+    - type: map_at_10
+      value: 13.446
+    - type: ndcg_at_1
+      value: 10.5
+    - type: ndcg_at_3
+      value: 13.645
+    - type: ndcg_at_5
+      value: 15.504
+    - type: ndcg_at_10
+      value: 18.836
+    - type: recall_at_1
+      value: 6.646
+    - type: recall_at_3
+      value: 15.361
+    - type: recall_at_5
+      value: 19.925
+    - type: recall_at_10
+      value: 28.6
+    - type: precision_at_1
+      value: 10.5
+    - type: precision_at_3
+      value: 8.533
+    - type: precision_at_5
+      value: 6.9
+    - type: precision_at_10
+      value: 5.21
+    - type: mrr_at_1
+      value: 10.5
+    - type: mrr_at_3
+      value: 16.25
+    - type: mrr_at_5
+      value: 17.68
+    - type: mrr_at_10
+      value: 19.1759
     task:
       type: Retrieval
   - dataset:
       config: ar
-      name: MTEB MintakaRetrieval (ar)
-      revision: efa78cc2f74bbcd21eff2261f9e13aebe40b814e
-      split: test
-      type: jinaai/mintakaqa
+      name: MTEB MLQARetrieval (ar)
+      revision: 397ed406c1a7902140303e7faf60fff35b58d285
+      split: validation
+      type: mlqa/mmteb-mlqa
     metrics:
-    - type: main_score
-      value: 14.585
+    - type: main_score
+      value: 61.582
+    - type: map_at_1
+      value: 47.195
+    - type: map_at_3
+      value: 54.03
+    - type: map_at_5
+      value: 55.77
+    - type: map_at_10
+      value: 56.649
+    - type: ndcg_at_1
+      value: 47.195
+    - type: ndcg_at_3
+      value: 56.295
+    - type: ndcg_at_5
+      value: 59.417
+    - type: ndcg_at_10
+      value: 61.582
+    - type: recall_at_1
+      value: 47.195
+    - type: recall_at_3
+      value: 62.863
+    - type: recall_at_5
+      value: 70.406
+    - type: recall_at_10
+      value: 77.176
+    - type: precision_at_1
+      value: 47.195
+    - type: precision_at_3
+      value: 20.954
+    - type: precision_at_5
+      value: 14.081
+    - type: precision_at_10
+      value: 7.718
+    - type: mrr_at_1
+      value: 47.1954
+    - type: mrr_at_3
+      value: 54.0297
+    - type: mrr_at_5
+      value: 55.7705
+    - type: mrr_at_10
+      value: 56.6492
     task:
       type: Retrieval
   - dataset:
       config: default
-      name: MTEB SadeemQuestionRetrieval (default)
+      name: MTEB SadeemQuestionRetrieval (ar)
       revision: 3cb0752b182e5d5d740df547748b06663c8e0bd9
       split: test
-      type: sadeem-ai/sadeem-ar-eval-retrieval-questions
+      type: sadeem/mmteb-sadeem
     metrics:
-    - type: main_score
-      value: 57.653
+    - type: main_score
+      value: 57.653
+    - type: map_at_1
+      value: 25.084
+    - type: map_at_3
+      value: 46.338
+    - type: map_at_5
+      value: 47.556
+    - type: map_at_10
+      value: 48.207
+    - type: ndcg_at_1
+      value: 25.084
+    - type: ndcg_at_3
+      value: 53.91
+    - type: ndcg_at_5
+      value: 56.102
+    - type: ndcg_at_10
+      value: 57.653
+    - type: recall_at_1
+      value: 25.084
+    - type: recall_at_3
+      value: 76.017
+    - type: recall_at_5
+      value: 81.331
+    - type: recall_at_10
+      value: 86.07
+    - type: precision_at_1
+      value: 25.084
+    - type: precision_at_3
+      value: 25.339
+    - type: precision_at_5
+      value: 16.266
+    - type: precision_at_10
+      value: 8.607
+    - type: mrr_at_1
+      value: 23.1211
+    - type: mrr_at_3
+      value: 44.9657
+    - type: mrr_at_5
+      value: 46.3037
+    - type: mrr_at_10
+      value: 46.8749
     task:
       type: Retrieval
   - dataset:
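The expanded model-index block above is standard Hugging Face model-card front matter, so it can be read back programmatically. Below is a minimal sketch (not part of the commit) that parses the front matter with PyYAML and prints each task's main_score; the local README.md path is a placeholder, since the repository id is not shown in this diff. Note that in this card the reported main_score matches ndcg_at_10 for every retrieval task.

```python
# Minimal sketch: read MTEB results back out of a model card's YAML front matter.
# Assumes PyYAML is installed and "README.md" is a local copy of this card
# (placeholder path; the repo id is not named in this diff excerpt).
import yaml


def load_front_matter(path: str) -> dict:
    """Parse the YAML block between the leading '---' markers of a model card."""
    with open(path, encoding="utf-8") as f:
        text = f.read()
    # Front matter sits between the first two '---' delimiters.
    _, front_matter, _ = text.split("---", 2)
    return yaml.safe_load(front_matter)


card = load_front_matter("README.md")
for entry in card["model-index"]:
    for result in entry["results"]:
        dataset_name = result["dataset"]["name"]
        scores = {m["type"]: m["value"] for m in result["metrics"]}
        # In this card, main_score equals ndcg_at_10 for the retrieval tasks.
        print(f"{dataset_name}: main_score={scores.get('main_score')}")
```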