ai-forever committed on
Commit
369eb4e
·
verified ·
1 Parent(s): 76b7eb6

Add/update results for hf_user/my_model (version 1.65.0, guid dfe4921475b4427dacb7450b20780219)

Browse files
Files changed (1) hide show
  1. results.json +74 -0
results.json CHANGED
@@ -222,6 +222,80 @@
222
  "n_questions": 600,
223
  "submit_timestamp": ""
224
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
225
  }
226
  }
227
  },
 
222
  "n_questions": 600,
223
  "submit_timestamp": ""
224
  }
225
+ },
226
+ "dfe4921475b4427dacb7450b20780219": {
227
+ "model_name": "hf_user/my_model",
228
+ "timestamp": "2025-07-02T18:35:31",
229
+ "config": {
230
+ "embedding_model": "hf_user/my_embedder",
231
+ "retriever_type": "mmr",
232
+ "retrieval_config": {
233
+ "top_k": 5,
234
+ "chunk_size": 1000,
235
+ "chunk_overlap": 100
236
+ }
237
+ },
238
+ "metrics": {
239
+ "simple": {
240
+ "retrieval": {
241
+ "hit_rate": 0.82,
242
+ "mrr": 0.7896666666666667,
243
+ "precision": 0.18266666666666664
244
+ },
245
+ "generation": {
246
+ "rouge1": 0.39371534830181254,
247
+ "rougeL": 0.3930892733104125
248
+ }
249
+ },
250
+ "cond": {
251
+ "retrieval": {
252
+ "hit_rate": 0.8733333333333333,
253
+ "mrr": 0.8282222222222223,
254
+ "precision": 0.20266666666666666
255
+ },
256
+ "generation": {
257
+ "rouge1": 0.6521336517193465,
258
+ "rougeL": 0.6511459973983588
259
+ }
260
+ },
261
+ "set": {
262
+ "retrieval": {
263
+ "hit_rate": 0.8,
264
+ "mrr": 0.7502222222222222,
265
+ "precision": 0.18933333333333333
266
+ },
267
+ "generation": {
268
+ "rouge1": 0.25690399303782746,
269
+ "rougeL": 0.18563893045825555
270
+ }
271
+ },
272
+ "mh": {
273
+ "retrieval": {
274
+ "hit_rate": 0.8066666666666666,
275
+ "mrr": 0.7641111111111111,
276
+ "precision": 0.17333333333333328
277
+ },
278
+ "generation": {
279
+ "rouge1": 0.4650920889738333,
280
+ "rougeL": 0.4641397080214524
281
+ }
282
+ },
283
+ "overall": {
284
+ "retrieval": {
285
+ "hit_rate": 0.825,
286
+ "mrr": 0.7830555555555555,
287
+ "precision": 0.18699999999999997
288
+ },
289
+ "generation": {
290
+ "rouge1": 0.441961270508205,
291
+ "rougeL": 0.42350347729711985
292
+ }
293
+ }
294
+ },
295
+ "metadata": {
296
+ "n_questions": 600,
297
+ "submit_timestamp": ""
298
+ }
299
  }
300
  }
301
  },