ai-forever committed on
Commit
bce6f4a
·
verified ·
1 Parent(s): abf53d3

Add/update results for Gemma-3 4B (version 0.1.1, guid b0cf95586f434818a31f12325d9eae66)

Browse files
Files changed (1) hide show
  1. results.json +93 -6
results.json CHANGED
@@ -1,7 +1,94 @@
1
  {
2
- "items": {
3
- },
4
- "last_version": "1.0",
5
- "n_questions": "11",
6
- "date_title": "22 марта 2025"
7
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  {
2
+ "items": {
3
+ "0.1.1": {
4
+ "b0cf95586f434818a31f12325d9eae66": {
5
+ "model_name": "Gemma-3 4B",
6
+ "timestamp": "2025-03-28T10:48:36",
7
+ "config": {
8
+ "embedding_model": "sentence-transformers/distiluse-base-multilingual-cased",
9
+ "retriever_type": "mmr",
10
+ "retrieval_config": {
11
+ "top_k": 5,
12
+ "chunk_size": 1000,
13
+ "chunk_overlap": 100
14
+ }
15
+ },
16
+ "metrics": {
17
+ "cond": {
18
+ "retrieval": {
19
+ "hit_rate": 0.74,
20
+ "mrr": 0.5920000000000001,
21
+ "precision": 0.16799999999999998
22
+ },
23
+ "generation": {
24
+ "rouge1": 0.11885237343301858,
25
+ "rougeL": 0.11885237343301858
26
+ }
27
+ },
28
+ "comp": {
29
+ "retrieval": {
30
+ "hit_rate": 0.72,
31
+ "mrr": 0.4933333333333333,
32
+ "precision": 0.2
33
+ },
34
+ "generation": {
35
+ "rouge1": 0.08331701939804853,
36
+ "rougeL": 0.08331701939804853
37
+ }
38
+ },
39
+ "set": {
40
+ "retrieval": {
41
+ "hit_rate": 0.52,
42
+ "mrr": 0.38066666666666665,
43
+ "precision": 0.132
44
+ },
45
+ "generation": {
46
+ "rouge1": 0.09937513859436432,
47
+ "rougeL": 0.0802916087780759
48
+ }
49
+ },
50
+ "mh": {
51
+ "retrieval": {
52
+ "hit_rate": 0.6,
53
+ "mrr": 0.4073333333333333,
54
+ "precision": 0.14800000000000002
55
+ },
56
+ "generation": {
57
+ "rouge1": 0.0643998533286459,
58
+ "rougeL": 0.0643998533286459
59
+ }
60
+ },
61
+ "simple": {
62
+ "retrieval": {
63
+ "hit_rate": 0.5,
64
+ "mrr": 0.3496666666666666,
65
+ "precision": 0.12
66
+ },
67
+ "generation": {
68
+ "rouge1": 0.05821997465191799,
69
+ "rougeL": 0.05821997465191799
70
+ }
71
+ },
72
+ "overall": {
73
+ "retrieval": {
74
+ "hit_rate": 0.616,
75
+ "mrr": 0.4446,
76
+ "precision": 0.15360000000000001
77
+ },
78
+ "generation": {
79
+ "rouge1": 0.08483287188119908,
80
+ "rougeL": 0.08101616591794138
81
+ }
82
+ }
83
+ },
84
+ "metadata": {
85
+ "n_questions": 250,
86
+ "submit_timestamp": ""
87
+ }
88
+ }
89
+ }
90
+ },
91
+ "last_version": "0.1.1",
92
+ "n_questions": 250,
93
+ "date_title": "28 \u043c\u0430\u0440\u0442\u0430 2025"
94
+ }