Add/update results for Gemma-3 12B k8 (version 0.1.2, guid f7bb3e3141224421b1c7a2b1e48451fb)
results.json  CHANGED  (+85 -0)
@@ -427,6 +427,91 @@
           "n_questions": 250,
           "submit_timestamp": ""
         }
+      },
+      "f7bb3e3141224421b1c7a2b1e48451fb": {
+        "model_name": "Gemma-3 12B k8",
+        "timestamp": "2025-04-01T07:15:29",
+        "config": {
+          "embedding_model": "sentence-transformers/distiluse-base-multilingual-cased",
+          "retriever_type": "mmr",
+          "retrieval_config": {
+            "top_k": 8,
+            "chunk_size": 1000,
+            "chunk_overlap": 100
+          }
+        },
+        "metrics": {
+          "cond": {
+            "retrieval": {
+              "hit_rate": 0.76,
+              "mrr": 0.5915476190476191,
+              "precision": 0.1175
+            },
+            "generation": {
+              "rouge1": 0.15650517598343686,
+              "rougeL": 0.15650517598343686
+            }
+          },
+          "comp": {
+            "retrieval": {
+              "hit_rate": 0.82,
+              "mrr": 0.5456904761904762,
+              "precision": 0.1725
+            },
+            "generation": {
+              "rouge1": 0.14202105084458025,
+              "rougeL": 0.14202105084458025
+            }
+          },
+          "set": {
+            "retrieval": {
+              "hit_rate": 0.66,
+              "mrr": 0.3988333333333333,
+              "precision": 0.12
+            },
+            "generation": {
+              "rouge1": 0.18979595254372256,
+              "rougeL": 0.14788271782150667
+            }
+          },
+          "mh": {
+            "retrieval": {
+              "hit_rate": 0.7,
+              "mrr": 0.41404761904761905,
+              "precision": 0.12
+            },
+            "generation": {
+              "rouge1": 0.12185830835830835,
+              "rougeL": 0.12185830835830835
+            }
+          },
+          "simple": {
+            "retrieval": {
+              "hit_rate": 0.58,
+              "mrr": 0.4046666666666667,
+              "precision": 0.1
+            },
+            "generation": {
+              "rouge1": 0.09656098803157626,
+              "rougeL": 0.09656098803157626
+            }
+          },
+          "overall": {
+            "retrieval": {
+              "hit_rate": 0.704,
+              "mrr": 0.4709571428571429,
+              "precision": 0.126
+            },
+            "generation": {
+              "rouge1": 0.14134829515232486,
+              "rougeL": 0.13296564820788165
+            }
+          }
+        },
+        "metadata": {
+          "n_questions": 250,
+          "submit_timestamp": ""
+        }
       }
     }
   },
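The config block above records the retrieval setup behind these numbers: a multilingual sentence-transformers embedding model, an MMR retriever with top_k = 8, and 1000-character chunks with 100-character overlap. The evaluated pipeline itself is not part of this commit, so the sketch below is only a guess at what such a config might map to, assuming a LangChain-style RAG stack; the docs corpus and all wiring are illustrative, not taken from the Space.

# Hypothetical reconstruction of the submitted config. The Space's actual
# pipeline is not in this commit; a LangChain-style stack is assumed here,
# and every identifier below is illustrative.
from langchain_community.vectorstores import FAISS
from langchain_core.documents import Document
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_text_splitters import RecursiveCharacterTextSplitter

docs = [Document(page_content="...")]  # stand-in corpus; not part of the commit

# "embedding_model"
embeddings = HuggingFaceEmbeddings(
    model_name="sentence-transformers/distiluse-base-multilingual-cased"
)

# "retrieval_config": chunk_size 1000, chunk_overlap 100
splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
chunks = splitter.split_documents(docs)

# "retriever_type": "mmr" with "top_k": 8; search_type="mmr" makes the store
# rerank candidates for diversity as well as query similarity.
store = FAISS.from_documents(chunks, embeddings)
retriever = store.as_retriever(search_type="mmr", search_kwargs={"k": 8})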
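Each metrics entry follows the same schema per question category (cond, comp, set, mh, simple, plus overall): retrieval hit_rate / mrr / precision and generation rouge1 / rougeL. A minimal post-processing sketch, assuming only that submissions are keyed by guid somewhere inside results.json (the hunk does not show the file's top-level layout): it locates this entry, prints a per-category summary, and checks that each overall score is the unweighted mean of the five category scores, which holds for this submission (for example, hit_rate: (0.76 + 0.82 + 0.66 + 0.7 + 0.58) / 5 = 0.704).

import json

GUID = "f7bb3e3141224421b1c7a2b1e48451fb"
CATEGORIES = ["cond", "comp", "set", "mh", "simple"]


def find_entry(node, guid):
    # Recursively search the parsed JSON for the dict keyed by `guid`;
    # the hunk does not reveal the top-level layout, so search rather
    # than assume a fixed path.
    if isinstance(node, dict):
        if guid in node:
            return node[guid]
        for value in node.values():
            hit = find_entry(value, guid)
            if hit is not None:
                return hit
    return None


with open("results.json") as f:
    entry = find_entry(json.load(f), GUID)

metrics = entry["metrics"]
print(entry["model_name"], entry["timestamp"])
for cat in CATEGORIES + ["overall"]:
    r, g = metrics[cat]["retrieval"], metrics[cat]["generation"]
    print(f"{cat:8s} hit_rate={r['hit_rate']:.3f} mrr={r['mrr']:.4f} "
          f"precision={r['precision']:.4f} rougeL={g['rougeL']:.4f}")

# The overall block matches the unweighted mean of the five categories
# (n_questions is 250, so presumably 50 questions per category).
for section, keys in [("retrieval", ["hit_rate", "mrr", "precision"]),
                      ("generation", ["rouge1", "rougeL"])]:
    for key in keys:
        mean = sum(metrics[c][section][key] for c in CATEGORIES) / len(CATEGORIES)
        assert abs(mean - metrics["overall"][section][key]) < 1e-9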