ai-forever committed on
Commit
eaf544f
·
verified ·
1 Parent(s): 323e9a7

Add/update results for Gemma-3 12B (version 0.1.1, guid b4e8183f6aab486681c2c6ea3a13165d)

Browse files
Files changed (1) hide show
  1. results.json +85 -0
results.json CHANGED
@@ -170,6 +170,91 @@
170
  "n_questions": 250,
171
  "submit_timestamp": ""
172
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
173
  }
174
  }
175
  },
 
170
  "n_questions": 250,
171
  "submit_timestamp": ""
172
  }
173
+ },
174
+ "b4e8183f6aab486681c2c6ea3a13165d": {
175
+ "model_name": "Gemma-3 12B",
176
+ "timestamp": "2025-03-28T10:52:25",
177
+ "config": {
178
+ "embedding_model": "sentence-transformers/distiluse-base-multilingual-cased",
179
+ "retriever_type": "mmr",
180
+ "retrieval_config": {
181
+ "top_k": 5,
182
+ "chunk_size": 1000,
183
+ "chunk_overlap": 100
184
+ }
185
+ },
186
+ "metrics": {
187
+ "cond": {
188
+ "retrieval": {
189
+ "hit_rate": 0.74,
190
+ "mrr": 0.5920000000000001,
191
+ "precision": 0.16799999999999998
192
+ },
193
+ "generation": {
194
+ "rouge1": 0.14584126984126983,
195
+ "rougeL": 0.14584126984126983
196
+ }
197
+ },
198
+ "comp": {
199
+ "retrieval": {
200
+ "hit_rate": 0.72,
201
+ "mrr": 0.4933333333333333,
202
+ "precision": 0.2
203
+ },
204
+ "generation": {
205
+ "rouge1": 0.17622510822510826,
206
+ "rougeL": 0.17622510822510826
207
+ }
208
+ },
209
+ "set": {
210
+ "retrieval": {
211
+ "hit_rate": 0.52,
212
+ "mrr": 0.38066666666666665,
213
+ "precision": 0.132
214
+ },
215
+ "generation": {
216
+ "rouge1": 0.13877635775154523,
217
+ "rougeL": 0.11396197213715961
218
+ }
219
+ },
220
+ "mh": {
221
+ "retrieval": {
222
+ "hit_rate": 0.6,
223
+ "mrr": 0.4073333333333333,
224
+ "precision": 0.14800000000000002
225
+ },
226
+ "generation": {
227
+ "rouge1": 0.047134453781512604,
228
+ "rougeL": 0.047134453781512604
229
+ }
230
+ },
231
+ "simple": {
232
+ "retrieval": {
233
+ "hit_rate": 0.5,
234
+ "mrr": 0.3496666666666666,
235
+ "precision": 0.12
236
+ },
237
+ "generation": {
238
+ "rouge1": 0.08229166315355574,
239
+ "rougeL": 0.08229166315355574
240
+ }
241
+ },
242
+ "overall": {
243
+ "retrieval": {
244
+ "hit_rate": 0.616,
245
+ "mrr": 0.4446,
246
+ "precision": 0.15360000000000001
247
+ },
248
+ "generation": {
249
+ "rouge1": 0.11805377055059833,
250
+ "rougeL": 0.11309089342772122
251
+ }
252
+ }
253
+ },
254
+ "metadata": {
255
+ "n_questions": 250,
256
+ "submit_timestamp": ""
257
+ }
258
  }
259
  }
260
  },