ai-forever committed
Commit cf61cb9 · verified · 1 parent: a37858c

Add/update results for Gemma-3 1B (version 1.33.1, guid 0733a3f98fc046deb0bfe1b7cc110547)

Files changed (1)
  1. results.json +78 -2
results.json CHANGED
@@ -301,9 +301,85 @@
           "submit_timestamp": ""
         }
       }
+    },
+    "1.33.1": {
+      "0733a3f98fc046deb0bfe1b7cc110547": {
+        "model_name": "Gemma-3 1B",
+        "timestamp": "2025-05-22T10:26:07",
+        "config": {
+          "embedding_model": "sentence-transformers/distiluse-base-multilingual-cased",
+          "retriever_type": "mmr",
+          "retrieval_config": {
+            "top_k": 5,
+            "chunk_size": 1000,
+            "chunk_overlap": 100
+          }
+        },
+        "metrics": {
+          "mh": {
+            "retrieval": {
+              "hit_rate": 0.5208333333333334,
+              "mrr": 0.3277777777777778,
+              "precision": 0.11666666666666668
+            },
+            "generation": {
+              "rouge1": 0.027308327224987633,
+              "rougeL": 0.027308327224987633
+            }
+          },
+          "cond": {
+            "retrieval": {
+              "hit_rate": 0.6078431372549019,
+              "mrr": 0.45392156862745103,
+              "precision": 0.1725490196078431
+            },
+            "generation": {
+              "rouge1": 0.019595373319985267,
+              "rougeL": 0.019595373319985267
+            }
+          },
+          "simple": {
+            "retrieval": {
+              "hit_rate": 0.5925925925925926,
+              "mrr": 0.3833333333333333,
+              "precision": 0.13703703703703707
+            },
+            "generation": {
+              "rouge1": 0.06423697711511875,
+              "rougeL": 0.06423697711511875
+            }
+          },
+          "set": {
+            "retrieval": {
+              "hit_rate": 0.6,
+              "mrr": 0.24,
+              "precision": 0.16
+            },
+            "generation": {
+              "rouge1": 0.04461659954197268,
+              "rougeL": 0.04461659954197268
+            }
+          },
+          "overall": {
+            "retrieval": {
+              "hit_rate": 0.5759493670886076,
+              "mrr": 0.384704641350211,
+              "precision": 0.1430379746835443
+            },
+            "generation": {
+              "rouge1": 0.03798761713952488,
+              "rougeL": 0.03798761713952488
+            }
+          }
+        },
+        "metadata": {
+          "n_questions": 158,
+          "submit_timestamp": ""
+        }
+      }
     }
   },
-  "last_version": "1.32.1",
-  "n_questions": 84,
+  "last_version": "1.33.1",
+  "n_questions": 158,
   "date_title": "22 \u043c\u0430\u044f 2025"
 }
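
The "config" block above pins down the retrieval side of the run: multilingual DistilUSE sentence embeddings, an MMR (maximal marginal relevance) retriever, top_k 5, and 1000-character chunks with 100 characters of overlap. The commit records only these parameters, not the code that consumed them, so the following is merely a sketch of how the same settings map onto LangChain primitives; the framework choice, the FAISS index, and the raw_texts input are all assumptions, not ai-forever's actual harness.

# A sketch of the "config" block realized as LangChain primitives.
# Every import here is an assumption: the commit shows parameters only.
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
from langchain_text_splitters import RecursiveCharacterTextSplitter

def build_retriever(raw_texts: list[str]):
    # chunk_size / chunk_overlap exactly as in retrieval_config
    splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
    docs = splitter.create_documents(raw_texts)

    # embedding_model from the config block
    embeddings = HuggingFaceEmbeddings(
        model_name="sentence-transformers/distiluse-base-multilingual-cased"
    )
    index = FAISS.from_documents(docs, embeddings)  # FAISS is an assumed choice

    # retriever_type "mmr" with top_k 5
    return index.as_retriever(search_type="mmr", search_kwargs={"k": 5})

MMR trades pure similarity for diversity among the k returned chunks, which is the usual reason to pick it over plain similarity search.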
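The diff records hit_rate, mrr and precision per question type (mh, cond, simple, set) plus an overall roll-up over all 158 questions, but not how they are computed. Read conventionally, per question: hit rate is 1 if any gold chunk appears in the top-k, MRR is the reciprocal rank of the first gold chunk, and precision is the share of the k retrieved chunks that are gold, each then averaged over questions. A minimal sketch under that assumption (the function name and its interface are illustrative, not the harness's):

def retrieval_scores(ranked_ids: list[str], gold_ids: set[str]) -> dict:
    # Per-question scores for the three metric names in the diff,
    # under the conventional definitions described above. ranked_ids
    # holds the ids of the top-k retrieved chunks in rank order,
    # gold_ids the ids of the reference chunks.
    hit_ranks = [i for i, cid in enumerate(ranked_ids) if cid in gold_ids]
    return {
        "hit_rate": 1.0 if hit_ranks else 0.0,                  # any gold chunk in top-k?
        "mrr": 1.0 / (hit_ranks[0] + 1) if hit_ranks else 0.0,  # first-hit reciprocal rank
        "precision": len(hit_ranks) / len(ranked_ids) if ranked_ids else 0.0,
    }

These readings are at least consistent with the numbers: if each question had a single gold chunk, per-question precision with top_k 5 would cap at 1/5 = 0.2, and every precision reported above sits below that.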
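For anyone consuming the file, the hunk also shows the schema results.json settles into: benchmark versions mapping to {guid: submission} records, with last_version, n_questions and date_title kept at the top level. The key that holds the version map lies outside the hunk, so the sketch below locates it by shape rather than by name; that lookup, and the file path, are assumptions.

import json

def overall_metrics(results_path: str) -> dict:
    # Load the results file (layout as in the diff above).
    with open(results_path, encoding="utf-8") as fh:
        data = json.load(fh)

    last_version = data["last_version"]  # "1.33.1" after this commit

    # The version map is the dict-valued top-level entry that has
    # last_version among its keys; its own key name is not visible
    # in the hunk, so we find it by shape.
    version_map = next(
        v for v in data.values()
        if isinstance(v, dict) and last_version in v
    )

    # Each version maps submission guids to result records; this
    # commit adds exactly one guid under "1.33.1".
    submission = next(iter(version_map[last_version].values()))
    return submission["metrics"]["overall"]

print(overall_metrics("results.json"))

Run against the state committed here, this would return the overall block for Gemma-3 1B: hit_rate ≈ 0.576, mrr ≈ 0.385, precision ≈ 0.143, and rouge1 = rougeL ≈ 0.038.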