Add/update results for Gemma-3 1B (version 1.32.1, guid 0733a3f98fc046deb0bfe1b7cc110547)
results.json  CHANGED  (+79, -3)
@@ -151,9 +151,85 @@
           "submit_timestamp": ""
         }
       }
+    },
+    "1.32.1": {
+      "0733a3f98fc046deb0bfe1b7cc110547": {
+        "model_name": "Gemma-3 1B",
+        "timestamp": "2025-05-22T09:41:08",
+        "config": {
+          "embedding_model": "sentence-transformers/distiluse-base-multilingual-cased",
+          "retriever_type": "mmr",
+          "retrieval_config": {
+            "top_k": 5,
+            "chunk_size": 1000,
+            "chunk_overlap": 100
+          }
+        },
+        "metrics": {
+          "mh": {
+            "retrieval": {
+              "hit_rate": 0.5862068965517241,
+              "mrr": 0.47413793103448276,
+              "precision": 0.11724137931034484
+            },
+            "generation": {
+              "rouge1": 0.02324309735987758,
+              "rougeL": 0.02324309735987758
+            }
+          },
+          "cond": {
+            "retrieval": {
+              "hit_rate": 0.6956521739130435,
+              "mrr": 0.6956521739130435,
+              "precision": 0.1478260869565218
+            },
+            "generation": {
+              "rouge1": 0.03246378430639343,
+              "rougeL": 0.03184707138625775
+            }
+          },
+          "simple": {
+            "retrieval": {
+              "hit_rate": 0.6206896551724138,
+              "mrr": 0.47701149425287354,
+              "precision": 0.1241379310344828
+            },
+            "generation": {
+              "rouge1": 0.03693659162302205,
+              "rougeL": 0.03693659162302205
+            }
+          },
+          "set": {
+            "retrieval": {
+              "hit_rate": 0.3333333333333333,
+              "mrr": 0.3333333333333333,
+              "precision": 0.06666666666666667
+            },
+            "generation": {
+              "rouge1": 0.053958575300038725,
+              "rougeL": 0.053958575300038725
+            }
+          },
+          "overall": {
+            "retrieval": {
+              "hit_rate": 0.6190476190476191,
+              "mrr": 0.5307539682539681,
+              "precision": 0.12619047619047616
+            },
+            "generation": {
+              "rouge1": 0.031592306493467316,
+              "rougeL": 0.03142344462247778
+            }
+          }
+        },
+        "metadata": {
+          "n_questions": 84,
+          "submit_timestamp": ""
+        }
+      }
     }
   },
-  "last_version": "1.
-  "n_questions":
-  "date_title": "
+  "last_version": "1.32.1",
+  "n_questions": 84,
+  "date_title": "22 \u043c\u0430\u044f 2025"
 }
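For anyone consuming these results programmatically, the entry added in this commit can be read back with a few lines of Python. This is a minimal sketch, assuming results.json is laid out exactly as the hunk above shows; the name of the top-level key that holds the per-version map is not visible in the diff, so the sketch locates that map by scanning top-level values instead of hard-coding a key name.

import json

# Inspect the newly added Gemma-3 1B entry in results.json.
# Assumption: layout matches the diff above; the key holding the per-version
# map is not shown in the hunk, so we search for it rather than name it.
VERSION = "1.32.1"
GUID = "0733a3f98fc046deb0bfe1b7cc110547"

with open("results.json", encoding="utf-8") as f:
    data = json.load(f)

# Find the top-level dict that contains the "1.32.1" version entry.
versions = next(v for v in data.values() if isinstance(v, dict) and VERSION in v)
entry = versions[VERSION][GUID]

print(entry["model_name"], entry["timestamp"])
print("config:", entry["config"]["retriever_type"], entry["config"]["retrieval_config"])
for category, metrics in entry["metrics"].items():
    r, g = metrics["retrieval"], metrics["generation"]
    print(f"{category:8s} hit_rate={r['hit_rate']:.3f} mrr={r['mrr']:.3f} "
          f"precision={r['precision']:.3f} rouge1={g['rouge1']:.4f}")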