Add/update results for My model 1000b (version 1.65.0, guid fa3cb60e3e394a60a00a5e0be3ebe657)
results.json CHANGED (+74 -0)
@@ -148,6 +148,80 @@
          "n_questions": 600,
          "submit_timestamp": ""
        }
+      },
+      "fa3cb60e3e394a60a00a5e0be3ebe657": {
+        "model_name": "My model 1000b",
+        "timestamp": "2025-07-02T18:31:26",
+        "config": {
+          "embedding_model": "sentence-transformers/distiluse-base-multilingual-cased",
+          "retriever_type": "mmr",
+          "retrieval_config": {
+            "top_k": 5,
+            "chunk_size": 1000,
+            "chunk_overlap": 100
+          }
+        },
+        "metrics": {
+          "simple": {
+            "retrieval": {
+              "hit_rate": 0.0,
+              "mrr": 0.0,
+              "precision": 0.0
+            },
+            "generation": {
+              "rouge1": 0.010610951697752271,
+              "rougeL": 0.009067741821209062
+            }
+          },
+          "cond": {
+            "retrieval": {
+              "hit_rate": 0.015625,
+              "mrr": 0.003125,
+              "precision": 0.003125
+            },
+            "generation": {
+              "rouge1": 0.004464285714285715,
+              "rougeL": 0.004464285714285715
+            }
+          },
+          "set": {
+            "retrieval": {
+              "hit_rate": 0.017241379310344827,
+              "mrr": 0.003448275862068966,
+              "precision": 0.003448275862068966
+            },
+            "generation": {
+              "rouge1": 0.018844144152582017,
+              "rougeL": 0.018844144152582017
+            }
+          },
+          "mh": {
+            "retrieval": {
+              "hit_rate": 0.03571428571428571,
+              "mrr": 0.01488095238095238,
+              "precision": 0.0071428571428571435
+            },
+            "generation": {
+              "rouge1": 0.0,
+              "rougeL": 0.0
+            }
+          },
+          "overall": {
+            "retrieval": {
+              "hit_rate": 0.016,
+              "mrr": 0.004933333333333333,
+              "precision": 0.0032
+            },
+            "generation": {
+              "rouge1": 0.008570652675208823,
+              "rougeL": 0.008126208230764381
+            }
+          }
+        },
+        "metadata": {
+          "n_questions": 600,
+          "submit_timestamp": ""
+        }
      }
    }
  },
|