ai-forever committed on
Commit
b955674
·
verified ·
1 Parent(s): 40ba94f

Add/update results for Gemma-3 12B (version 0.1.2, guid 613829d1b5f9412ab338d0a255e13173)

Browse files
Files changed (1) hide show
  1. results.json +435 -350
results.json CHANGED
@@ -1,351 +1,436 @@
1
- {
2
- "items": {
3
- "0.1.1": {
4
- "b0cf95586f434818a31f12325d9eae66": {
5
- "model_name": "Gemma-3 4B",
6
- "timestamp": "2025-03-28T10:48:36",
7
- "config": {
8
- "embedding_model": "sentence-transformers/distiluse-base-multilingual-cased",
9
- "retriever_type": "mmr",
10
- "retrieval_config": {
11
- "top_k": 5,
12
- "chunk_size": 1000,
13
- "chunk_overlap": 100
14
- }
15
- },
16
- "metrics": {
17
- "cond": {
18
- "retrieval": {
19
- "hit_rate": 0.74,
20
- "mrr": 0.5920000000000001,
21
- "precision": 0.16799999999999998
22
- },
23
- "generation": {
24
- "rouge1": 0.11885237343301858,
25
- "rougeL": 0.11885237343301858
26
- }
27
- },
28
- "comp": {
29
- "retrieval": {
30
- "hit_rate": 0.72,
31
- "mrr": 0.4933333333333333,
32
- "precision": 0.2
33
- },
34
- "generation": {
35
- "rouge1": 0.08331701939804853,
36
- "rougeL": 0.08331701939804853
37
- }
38
- },
39
- "set": {
40
- "retrieval": {
41
- "hit_rate": 0.52,
42
- "mrr": 0.38066666666666665,
43
- "precision": 0.132
44
- },
45
- "generation": {
46
- "rouge1": 0.09937513859436432,
47
- "rougeL": 0.0802916087780759
48
- }
49
- },
50
- "mh": {
51
- "retrieval": {
52
- "hit_rate": 0.6,
53
- "mrr": 0.4073333333333333,
54
- "precision": 0.14800000000000002
55
- },
56
- "generation": {
57
- "rouge1": 0.0643998533286459,
58
- "rougeL": 0.0643998533286459
59
- }
60
- },
61
- "simple": {
62
- "retrieval": {
63
- "hit_rate": 0.5,
64
- "mrr": 0.3496666666666666,
65
- "precision": 0.12
66
- },
67
- "generation": {
68
- "rouge1": 0.05821997465191799,
69
- "rougeL": 0.05821997465191799
70
- }
71
- },
72
- "overall": {
73
- "retrieval": {
74
- "hit_rate": 0.616,
75
- "mrr": 0.4446,
76
- "precision": 0.15360000000000001
77
- },
78
- "generation": {
79
- "rouge1": 0.08483287188119908,
80
- "rougeL": 0.08101616591794138
81
- }
82
- }
83
- },
84
- "metadata": {
85
- "n_questions": 250,
86
- "submit_timestamp": ""
87
- }
88
- },
89
- "73ae38971b0c48a2bedd143db7c25d3b": {
90
- "model_name": "Llama 3.3 70B",
91
- "timestamp": "2025-03-28T10:51:17",
92
- "config": {
93
- "embedding_model": "sentence-transformers/distiluse-base-multilingual-cased",
94
- "retriever_type": "mmr",
95
- "retrieval_config": {
96
- "top_k": 5,
97
- "chunk_size": 1000,
98
- "chunk_overlap": 100
99
- }
100
- },
101
- "metrics": {
102
- "cond": {
103
- "retrieval": {
104
- "hit_rate": 0.64,
105
- "mrr": 0.514,
106
- "precision": 0.14400000000000002
107
- },
108
- "generation": {
109
- "rouge1": 0.2493333333333333,
110
- "rougeL": 0.2493333333333333
111
- }
112
- },
113
- "comp": {
114
- "retrieval": {
115
- "hit_rate": 0.72,
116
- "mrr": 0.4933333333333333,
117
- "precision": 0.2
118
- },
119
- "generation": {
120
- "rouge1": 0.23564204559660382,
121
- "rougeL": 0.23564204559660382
122
- }
123
- },
124
- "set": {
125
- "retrieval": {
126
- "hit_rate": 0.5,
127
- "mrr": 0.3683333333333333,
128
- "precision": 0.12400000000000003
129
- },
130
- "generation": {
131
- "rouge1": 0.21594023633499185,
132
- "rougeL": 0.17318924819269935
133
- }
134
- },
135
- "mh": {
136
- "retrieval": {
137
- "hit_rate": 0.58,
138
- "mrr": 0.3846666666666666,
139
- "precision": 0.14800000000000002
140
- },
141
- "generation": {
142
- "rouge1": 0.19785714285714284,
143
- "rougeL": 0.19785714285714284
144
- }
145
- },
146
- "simple": {
147
- "retrieval": {
148
- "hit_rate": 0.48,
149
- "mrr": 0.33399999999999996,
150
- "precision": 0.12
151
- },
152
- "generation": {
153
- "rouge1": 0.16162049062049064,
154
- "rougeL": 0.16162049062049064
155
- }
156
- },
157
- "overall": {
158
- "retrieval": {
159
- "hit_rate": 0.584,
160
- "mrr": 0.41886666666666666,
161
- "precision": 0.1472
162
- },
163
- "generation": {
164
- "rouge1": 0.21207864974851248,
165
- "rougeL": 0.20352845212005397
166
- }
167
- }
168
- },
169
- "metadata": {
170
- "n_questions": 250,
171
- "submit_timestamp": ""
172
- }
173
- },
174
- "b4e8183f6aab486681c2c6ea3a13165d": {
175
- "model_name": "Gemma-3 12B",
176
- "timestamp": "2025-03-28T10:52:25",
177
- "config": {
178
- "embedding_model": "sentence-transformers/distiluse-base-multilingual-cased",
179
- "retriever_type": "mmr",
180
- "retrieval_config": {
181
- "top_k": 5,
182
- "chunk_size": 1000,
183
- "chunk_overlap": 100
184
- }
185
- },
186
- "metrics": {
187
- "cond": {
188
- "retrieval": {
189
- "hit_rate": 0.74,
190
- "mrr": 0.5920000000000001,
191
- "precision": 0.16799999999999998
192
- },
193
- "generation": {
194
- "rouge1": 0.14584126984126983,
195
- "rougeL": 0.14584126984126983
196
- }
197
- },
198
- "comp": {
199
- "retrieval": {
200
- "hit_rate": 0.72,
201
- "mrr": 0.4933333333333333,
202
- "precision": 0.2
203
- },
204
- "generation": {
205
- "rouge1": 0.17622510822510826,
206
- "rougeL": 0.17622510822510826
207
- }
208
- },
209
- "set": {
210
- "retrieval": {
211
- "hit_rate": 0.52,
212
- "mrr": 0.38066666666666665,
213
- "precision": 0.132
214
- },
215
- "generation": {
216
- "rouge1": 0.13877635775154523,
217
- "rougeL": 0.11396197213715961
218
- }
219
- },
220
- "mh": {
221
- "retrieval": {
222
- "hit_rate": 0.6,
223
- "mrr": 0.4073333333333333,
224
- "precision": 0.14800000000000002
225
- },
226
- "generation": {
227
- "rouge1": 0.047134453781512604,
228
- "rougeL": 0.047134453781512604
229
- }
230
- },
231
- "simple": {
232
- "retrieval": {
233
- "hit_rate": 0.5,
234
- "mrr": 0.3496666666666666,
235
- "precision": 0.12
236
- },
237
- "generation": {
238
- "rouge1": 0.08229166315355574,
239
- "rougeL": 0.08229166315355574
240
- }
241
- },
242
- "overall": {
243
- "retrieval": {
244
- "hit_rate": 0.616,
245
- "mrr": 0.4446,
246
- "precision": 0.15360000000000001
247
- },
248
- "generation": {
249
- "rouge1": 0.11805377055059833,
250
- "rougeL": 0.11309089342772122
251
- }
252
- }
253
- },
254
- "metadata": {
255
- "n_questions": 250,
256
- "submit_timestamp": ""
257
- }
258
- }
259
- },
260
- "0.1.2": {
261
- "73ae38971b0c48a2bedd143db7c25d3b": {
262
- "model_name": "Llama 3.3 70B",
263
- "timestamp": "2025-03-28T10:55:59",
264
- "config": {
265
- "embedding_model": "sentence-transformers/distiluse-base-multilingual-cased",
266
- "retriever_type": "mmr",
267
- "retrieval_config": {
268
- "top_k": 5,
269
- "chunk_size": 1000,
270
- "chunk_overlap": 100
271
- }
272
- },
273
- "metrics": {
274
- "cond": {
275
- "retrieval": {
276
- "hit_rate": 0.64,
277
- "mrr": 0.514,
278
- "precision": 0.14400000000000002
279
- },
280
- "generation": {
281
- "rouge1": 0.2493333333333333,
282
- "rougeL": 0.2493333333333333
283
- }
284
- },
285
- "comp": {
286
- "retrieval": {
287
- "hit_rate": 0.72,
288
- "mrr": 0.4933333333333333,
289
- "precision": 0.2
290
- },
291
- "generation": {
292
- "rouge1": 0.23564204559660382,
293
- "rougeL": 0.23564204559660382
294
- }
295
- },
296
- "set": {
297
- "retrieval": {
298
- "hit_rate": 0.5,
299
- "mrr": 0.3683333333333333,
300
- "precision": 0.12400000000000003
301
- },
302
- "generation": {
303
- "rouge1": 0.21594023633499185,
304
- "rougeL": 0.17318924819269935
305
- }
306
- },
307
- "mh": {
308
- "retrieval": {
309
- "hit_rate": 0.58,
310
- "mrr": 0.3846666666666666,
311
- "precision": 0.14800000000000002
312
- },
313
- "generation": {
314
- "rouge1": 0.19785714285714284,
315
- "rougeL": 0.19785714285714284
316
- }
317
- },
318
- "simple": {
319
- "retrieval": {
320
- "hit_rate": 0.48,
321
- "mrr": 0.33399999999999996,
322
- "precision": 0.12
323
- },
324
- "generation": {
325
- "rouge1": 0.16162049062049064,
326
- "rougeL": 0.16162049062049064
327
- }
328
- },
329
- "overall": {
330
- "retrieval": {
331
- "hit_rate": 0.584,
332
- "mrr": 0.41886666666666666,
333
- "precision": 0.1472
334
- },
335
- "generation": {
336
- "rouge1": 0.21207864974851248,
337
- "rougeL": 0.20352845212005397
338
- }
339
- }
340
- },
341
- "metadata": {
342
- "n_questions": 250,
343
- "submit_timestamp": ""
344
- }
345
- }
346
- }
347
- },
348
- "last_version": "0.1.2",
349
- "n_questions": 250,
350
- "date_title": "28 \u043c\u0430\u0440\u0442\u0430 2025"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
351
  }
 
1
+ {
2
+ "items": {
3
+ "0.1.1": {
4
+ "b0cf95586f434818a31f12325d9eae66": {
5
+ "model_name": "Gemma-3 4B",
6
+ "timestamp": "2025-03-28T10:48:36",
7
+ "config": {
8
+ "embedding_model": "sentence-transformers/distiluse-base-multilingual-cased",
9
+ "retriever_type": "mmr",
10
+ "retrieval_config": {
11
+ "top_k": 5,
12
+ "chunk_size": 1000,
13
+ "chunk_overlap": 100
14
+ }
15
+ },
16
+ "metrics": {
17
+ "cond": {
18
+ "retrieval": {
19
+ "hit_rate": 0.74,
20
+ "mrr": 0.5920000000000001,
21
+ "precision": 0.16799999999999998
22
+ },
23
+ "generation": {
24
+ "rouge1": 0.11885237343301858,
25
+ "rougeL": 0.11885237343301858
26
+ }
27
+ },
28
+ "comp": {
29
+ "retrieval": {
30
+ "hit_rate": 0.72,
31
+ "mrr": 0.4933333333333333,
32
+ "precision": 0.2
33
+ },
34
+ "generation": {
35
+ "rouge1": 0.08331701939804853,
36
+ "rougeL": 0.08331701939804853
37
+ }
38
+ },
39
+ "set": {
40
+ "retrieval": {
41
+ "hit_rate": 0.52,
42
+ "mrr": 0.38066666666666665,
43
+ "precision": 0.132
44
+ },
45
+ "generation": {
46
+ "rouge1": 0.09937513859436432,
47
+ "rougeL": 0.0802916087780759
48
+ }
49
+ },
50
+ "mh": {
51
+ "retrieval": {
52
+ "hit_rate": 0.6,
53
+ "mrr": 0.4073333333333333,
54
+ "precision": 0.14800000000000002
55
+ },
56
+ "generation": {
57
+ "rouge1": 0.0643998533286459,
58
+ "rougeL": 0.0643998533286459
59
+ }
60
+ },
61
+ "simple": {
62
+ "retrieval": {
63
+ "hit_rate": 0.5,
64
+ "mrr": 0.3496666666666666,
65
+ "precision": 0.12
66
+ },
67
+ "generation": {
68
+ "rouge1": 0.05821997465191799,
69
+ "rougeL": 0.05821997465191799
70
+ }
71
+ },
72
+ "overall": {
73
+ "retrieval": {
74
+ "hit_rate": 0.616,
75
+ "mrr": 0.4446,
76
+ "precision": 0.15360000000000001
77
+ },
78
+ "generation": {
79
+ "rouge1": 0.08483287188119908,
80
+ "rougeL": 0.08101616591794138
81
+ }
82
+ }
83
+ },
84
+ "metadata": {
85
+ "n_questions": 250,
86
+ "submit_timestamp": ""
87
+ }
88
+ },
89
+ "73ae38971b0c48a2bedd143db7c25d3b": {
90
+ "model_name": "Llama 3.3 70B",
91
+ "timestamp": "2025-03-28T10:51:17",
92
+ "config": {
93
+ "embedding_model": "sentence-transformers/distiluse-base-multilingual-cased",
94
+ "retriever_type": "mmr",
95
+ "retrieval_config": {
96
+ "top_k": 5,
97
+ "chunk_size": 1000,
98
+ "chunk_overlap": 100
99
+ }
100
+ },
101
+ "metrics": {
102
+ "cond": {
103
+ "retrieval": {
104
+ "hit_rate": 0.64,
105
+ "mrr": 0.514,
106
+ "precision": 0.14400000000000002
107
+ },
108
+ "generation": {
109
+ "rouge1": 0.2493333333333333,
110
+ "rougeL": 0.2493333333333333
111
+ }
112
+ },
113
+ "comp": {
114
+ "retrieval": {
115
+ "hit_rate": 0.72,
116
+ "mrr": 0.4933333333333333,
117
+ "precision": 0.2
118
+ },
119
+ "generation": {
120
+ "rouge1": 0.23564204559660382,
121
+ "rougeL": 0.23564204559660382
122
+ }
123
+ },
124
+ "set": {
125
+ "retrieval": {
126
+ "hit_rate": 0.5,
127
+ "mrr": 0.3683333333333333,
128
+ "precision": 0.12400000000000003
129
+ },
130
+ "generation": {
131
+ "rouge1": 0.21594023633499185,
132
+ "rougeL": 0.17318924819269935
133
+ }
134
+ },
135
+ "mh": {
136
+ "retrieval": {
137
+ "hit_rate": 0.58,
138
+ "mrr": 0.3846666666666666,
139
+ "precision": 0.14800000000000002
140
+ },
141
+ "generation": {
142
+ "rouge1": 0.19785714285714284,
143
+ "rougeL": 0.19785714285714284
144
+ }
145
+ },
146
+ "simple": {
147
+ "retrieval": {
148
+ "hit_rate": 0.48,
149
+ "mrr": 0.33399999999999996,
150
+ "precision": 0.12
151
+ },
152
+ "generation": {
153
+ "rouge1": 0.16162049062049064,
154
+ "rougeL": 0.16162049062049064
155
+ }
156
+ },
157
+ "overall": {
158
+ "retrieval": {
159
+ "hit_rate": 0.584,
160
+ "mrr": 0.41886666666666666,
161
+ "precision": 0.1472
162
+ },
163
+ "generation": {
164
+ "rouge1": 0.21207864974851248,
165
+ "rougeL": 0.20352845212005397
166
+ }
167
+ }
168
+ },
169
+ "metadata": {
170
+ "n_questions": 250,
171
+ "submit_timestamp": ""
172
+ }
173
+ },
174
+ "b4e8183f6aab486681c2c6ea3a13165d": {
175
+ "model_name": "Gemma-3 12B",
176
+ "timestamp": "2025-03-28T10:52:25",
177
+ "config": {
178
+ "embedding_model": "sentence-transformers/distiluse-base-multilingual-cased",
179
+ "retriever_type": "mmr",
180
+ "retrieval_config": {
181
+ "top_k": 5,
182
+ "chunk_size": 1000,
183
+ "chunk_overlap": 100
184
+ }
185
+ },
186
+ "metrics": {
187
+ "cond": {
188
+ "retrieval": {
189
+ "hit_rate": 0.74,
190
+ "mrr": 0.5920000000000001,
191
+ "precision": 0.16799999999999998
192
+ },
193
+ "generation": {
194
+ "rouge1": 0.14584126984126983,
195
+ "rougeL": 0.14584126984126983
196
+ }
197
+ },
198
+ "comp": {
199
+ "retrieval": {
200
+ "hit_rate": 0.72,
201
+ "mrr": 0.4933333333333333,
202
+ "precision": 0.2
203
+ },
204
+ "generation": {
205
+ "rouge1": 0.17622510822510826,
206
+ "rougeL": 0.17622510822510826
207
+ }
208
+ },
209
+ "set": {
210
+ "retrieval": {
211
+ "hit_rate": 0.52,
212
+ "mrr": 0.38066666666666665,
213
+ "precision": 0.132
214
+ },
215
+ "generation": {
216
+ "rouge1": 0.13877635775154523,
217
+ "rougeL": 0.11396197213715961
218
+ }
219
+ },
220
+ "mh": {
221
+ "retrieval": {
222
+ "hit_rate": 0.6,
223
+ "mrr": 0.4073333333333333,
224
+ "precision": 0.14800000000000002
225
+ },
226
+ "generation": {
227
+ "rouge1": 0.047134453781512604,
228
+ "rougeL": 0.047134453781512604
229
+ }
230
+ },
231
+ "simple": {
232
+ "retrieval": {
233
+ "hit_rate": 0.5,
234
+ "mrr": 0.3496666666666666,
235
+ "precision": 0.12
236
+ },
237
+ "generation": {
238
+ "rouge1": 0.08229166315355574,
239
+ "rougeL": 0.08229166315355574
240
+ }
241
+ },
242
+ "overall": {
243
+ "retrieval": {
244
+ "hit_rate": 0.616,
245
+ "mrr": 0.4446,
246
+ "precision": 0.15360000000000001
247
+ },
248
+ "generation": {
249
+ "rouge1": 0.11805377055059833,
250
+ "rougeL": 0.11309089342772122
251
+ }
252
+ }
253
+ },
254
+ "metadata": {
255
+ "n_questions": 250,
256
+ "submit_timestamp": ""
257
+ }
258
+ }
259
+ },
260
+ "0.1.2": {
261
+ "73ae38971b0c48a2bedd143db7c25d3b": {
262
+ "model_name": "Llama 3.3 70B",
263
+ "timestamp": "2025-03-28T10:55:59",
264
+ "config": {
265
+ "embedding_model": "sentence-transformers/distiluse-base-multilingual-cased",
266
+ "retriever_type": "mmr",
267
+ "retrieval_config": {
268
+ "top_k": 5,
269
+ "chunk_size": 1000,
270
+ "chunk_overlap": 100
271
+ }
272
+ },
273
+ "metrics": {
274
+ "cond": {
275
+ "retrieval": {
276
+ "hit_rate": 0.64,
277
+ "mrr": 0.514,
278
+ "precision": 0.14400000000000002
279
+ },
280
+ "generation": {
281
+ "rouge1": 0.2493333333333333,
282
+ "rougeL": 0.2493333333333333
283
+ }
284
+ },
285
+ "comp": {
286
+ "retrieval": {
287
+ "hit_rate": 0.72,
288
+ "mrr": 0.4933333333333333,
289
+ "precision": 0.2
290
+ },
291
+ "generation": {
292
+ "rouge1": 0.23564204559660382,
293
+ "rougeL": 0.23564204559660382
294
+ }
295
+ },
296
+ "set": {
297
+ "retrieval": {
298
+ "hit_rate": 0.5,
299
+ "mrr": 0.3683333333333333,
300
+ "precision": 0.12400000000000003
301
+ },
302
+ "generation": {
303
+ "rouge1": 0.21594023633499185,
304
+ "rougeL": 0.17318924819269935
305
+ }
306
+ },
307
+ "mh": {
308
+ "retrieval": {
309
+ "hit_rate": 0.58,
310
+ "mrr": 0.3846666666666666,
311
+ "precision": 0.14800000000000002
312
+ },
313
+ "generation": {
314
+ "rouge1": 0.19785714285714284,
315
+ "rougeL": 0.19785714285714284
316
+ }
317
+ },
318
+ "simple": {
319
+ "retrieval": {
320
+ "hit_rate": 0.48,
321
+ "mrr": 0.33399999999999996,
322
+ "precision": 0.12
323
+ },
324
+ "generation": {
325
+ "rouge1": 0.16162049062049064,
326
+ "rougeL": 0.16162049062049064
327
+ }
328
+ },
329
+ "overall": {
330
+ "retrieval": {
331
+ "hit_rate": 0.584,
332
+ "mrr": 0.41886666666666666,
333
+ "precision": 0.1472
334
+ },
335
+ "generation": {
336
+ "rouge1": 0.21207864974851248,
337
+ "rougeL": 0.20352845212005397
338
+ }
339
+ }
340
+ },
341
+ "metadata": {
342
+ "n_questions": 250,
343
+ "submit_timestamp": ""
344
+ }
345
+ },
346
+ "613829d1b5f9412ab338d0a255e13173": {
347
+ "model_name": "Gemma-3 12B",
348
+ "timestamp": "2025-04-01T07:07:53",
349
+ "config": {
350
+ "embedding_model": "sentence-transformers/distiluse-base-multilingual-cased",
351
+ "retriever_type": "mmr",
352
+ "retrieval_config": {
353
+ "top_k": 5,
354
+ "chunk_size": 1000,
355
+ "chunk_overlap": 100
356
+ }
357
+ },
358
+ "metrics": {
359
+ "cond": {
360
+ "retrieval": {
361
+ "hit_rate": 0.74,
362
+ "mrr": 0.5920000000000001,
363
+ "precision": 0.16799999999999998
364
+ },
365
+ "generation": {
366
+ "rouge1": 0.14584126984126983,
367
+ "rougeL": 0.14584126984126983
368
+ }
369
+ },
370
+ "comp": {
371
+ "retrieval": {
372
+ "hit_rate": 0.72,
373
+ "mrr": 0.4933333333333333,
374
+ "precision": 0.2
375
+ },
376
+ "generation": {
377
+ "rouge1": 0.17622510822510826,
378
+ "rougeL": 0.17622510822510826
379
+ }
380
+ },
381
+ "set": {
382
+ "retrieval": {
383
+ "hit_rate": 0.52,
384
+ "mrr": 0.38066666666666665,
385
+ "precision": 0.132
386
+ },
387
+ "generation": {
388
+ "rouge1": 0.13877635775154523,
389
+ "rougeL": 0.11396197213715961
390
+ }
391
+ },
392
+ "mh": {
393
+ "retrieval": {
394
+ "hit_rate": 0.6,
395
+ "mrr": 0.4073333333333333,
396
+ "precision": 0.14800000000000002
397
+ },
398
+ "generation": {
399
+ "rouge1": 0.047134453781512604,
400
+ "rougeL": 0.047134453781512604
401
+ }
402
+ },
403
+ "simple": {
404
+ "retrieval": {
405
+ "hit_rate": 0.5,
406
+ "mrr": 0.3496666666666666,
407
+ "precision": 0.12
408
+ },
409
+ "generation": {
410
+ "rouge1": 0.08229166315355574,
411
+ "rougeL": 0.08229166315355574
412
+ }
413
+ },
414
+ "overall": {
415
+ "retrieval": {
416
+ "hit_rate": 0.616,
417
+ "mrr": 0.4446,
418
+ "precision": 0.15360000000000001
419
+ },
420
+ "generation": {
421
+ "rouge1": 0.11805377055059833,
422
+ "rougeL": 0.11309089342772122
423
+ }
424
+ }
425
+ },
426
+ "metadata": {
427
+ "n_questions": 250,
428
+ "submit_timestamp": ""
429
+ }
430
+ }
431
+ }
432
+ },
433
+ "last_version": "0.1.2",
434
+ "n_questions": 250,
435
+ "date_title": "01 \u0430\u043f\u0440\u0435\u043b\u044f 2025"
436
  }