Add/update results for Gemma-3 4B (version 1.24.1, guid 6e34847eea964f838b7b2f6e37e9a585)
results.json CHANGED (+79, -3)
@@ -513,9 +513,85 @@
           "submit_timestamp": ""
         }
       }
+    },
+    "1.24.1": {
+      "6e34847eea964f838b7b2f6e37e9a585": {
+        "model_name": "Gemma-3 4B",
+        "timestamp": "2025-05-20T10:35:52",
+        "config": {
+          "embedding_model": "sentence-transformers/distiluse-base-multilingual-cased",
+          "retriever_type": "mmr",
+          "retrieval_config": {
+            "top_k": 5,
+            "chunk_size": 1000,
+            "chunk_overlap": 100
+          }
+        },
+        "metrics": {
+          "cond": {
+            "retrieval": {
+              "hit_rate": 0.0,
+              "mrr": 0.0,
+              "precision": 0.0
+            },
+            "generation": {
+              "rouge1": 0.0,
+              "rougeL": 0.0
+            }
+          },
+          "set": {
+            "retrieval": {
+              "hit_rate": 0.013333333333333334,
+              "mrr": 0.006666666666666667,
+              "precision": 0.002666666666666667
+            },
+            "generation": {
+              "rouge1": 0.04120834340355562,
+              "rougeL": 0.03920057190755136
+            }
+          },
+          "mh": {
+            "retrieval": {
+              "hit_rate": 0.0,
+              "mrr": 0.0,
+              "precision": 0.0
+            },
+            "generation": {
+              "rouge1": 0.003916409431473995,
+              "rougeL": 0.003916409431473995
+            }
+          },
+          "simple": {
+            "retrieval": {
+              "hit_rate": 0.006535947712418301,
+              "mrr": 0.0021786492374727667,
+              "precision": 0.0013071895424836603
+            },
+            "generation": {
+              "rouge1": 0.005283838541135648,
+              "rougeL": 0.004760962724142184
+            }
+          },
+          "overall": {
+            "retrieval": {
+              "hit_rate": 0.004975124378109453,
+              "mrr": 0.002211166390270868,
+              "precision": 0.0009950248756218907
+            },
+            "generation": {
+              "rouge1": 0.012565738345021884,
+              "rougeL": 0.011933622715833427
+            }
+          }
+        },
+        "metadata": {
+          "n_questions": 603,
+          "submit_timestamp": ""
+        }
+      }
     }
   },
-  "last_version": "
-  "n_questions":
-  "date_title": "
+  "last_version": "1.24.1",
+  "n_questions": 603,
+  "date_title": "20 \u043c\u0430\u044f 2025"
 }
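For context on the "config" block in this entry: answers are retrieved with the sentence-transformers/distiluse-base-multilingual-cased embedding model and an MMR (maximal marginal relevance) retriever returning top_k = 5 chunks, over 1000-character chunks with 100 characters of overlap. The sketch below shows one way such a retriever could look; the MMR lambda value, the candidate pool handling, and all function names are illustrative assumptions, not the Space's actual code.

```python
import numpy as np
from sentence_transformers import SentenceTransformer

# Embedding model taken from the config above; everything else is illustrative.
model = SentenceTransformer("sentence-transformers/distiluse-base-multilingual-cased")

def mmr_select(query: str, chunks: list[str], top_k: int = 5, lam: float = 0.5) -> list[int]:
    """Pick top_k chunk indices by maximal marginal relevance (MMR)."""
    q = model.encode([query], normalize_embeddings=True)[0]
    d = model.encode(chunks, normalize_embeddings=True)
    query_sim = d @ q                       # cosine similarity of each chunk to the query
    selected: list[int] = []
    candidates = list(range(len(chunks)))
    while candidates and len(selected) < top_k:
        if selected:
            # Penalise chunks that are similar to what has already been selected.
            redundancy = np.max(d[candidates] @ d[selected].T, axis=1)
        else:
            redundancy = np.zeros(len(candidates))
        scores = lam * query_sim[candidates] - (1 - lam) * redundancy
        best = candidates[int(np.argmax(scores))]
        selected.append(best)
        candidates.remove(best)
    return selected
```

MMR trades relevance to the query against redundancy among the chunks already selected, which is why a diversity weight (lam) appears here even though it is not part of the stored retrieval_config.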
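The per-category "retrieval" numbers (hit_rate, mrr, precision) read like standard top-k retrieval metrics averaged over questions. A minimal per-question sketch with illustrative names (the benchmark's own scoring code is not part of this diff):

```python
def retrieval_scores(retrieved_ids: list[str], relevant_ids: set[str], k: int = 5) -> dict:
    """Hit rate, reciprocal rank and precision@k for a single question.

    Corpus-level hit_rate / mrr / precision would then be the mean of these
    per-question values over all questions.
    """
    top = retrieved_ids[:k]
    hit = any(doc in relevant_ids for doc in top)
    rr = 0.0
    for rank, doc in enumerate(top, start=1):
        if doc in relevant_ids:
            rr = 1.0 / rank
            break
    precision = sum(doc in relevant_ids for doc in top) / k
    return {"hit_rate": float(hit), "mrr": rr, "precision": precision}
```

Under these definitions, the reported overall hit_rate of 0.004975... is consistent with a gold chunk appearing in the top 5 for roughly 3 of the 603 questions.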
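The "generation" numbers are ROUGE-1 and ROUGE-L scores of generated answers against reference answers. One common way to compute them is Google's rouge-score package; whether this leaderboard uses that library, F-measure aggregation, or stemming is an assumption here.

```python
from rouge_score import rouge_scorer

# Assumption: rouge1 / rougeL in results.json are F-measures averaged over questions.
scorer = rouge_scorer.RougeScorer(["rouge1", "rougeL"], use_stemmer=False)

def generation_scores(reference: str, prediction: str) -> dict:
    scores = scorer.score(reference, prediction)
    return {name: s.fmeasure for name, s in scores.items()}
```

Note that rouge-score's default tokenizer is geared toward English text, so a benchmark with non-English references (the date_title above is Russian) may need a custom tokenizer.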