ai-forever committed
Commit 40ba94f (verified) · Parent: eaf544f

Add/update results for Llama 3.3 70B (version 0.1.2, guid 73ae38971b0c48a2bedd143db7c25d3b)

Files changed (1): results.json (+88, -1)
results.json CHANGED
@@ -256,9 +256,96 @@
           "submit_timestamp": ""
         }
       }
+    },
+    "0.1.2": {
+      "73ae38971b0c48a2bedd143db7c25d3b": {
+        "model_name": "Llama 3.3 70B",
+        "timestamp": "2025-03-28T10:55:59",
+        "config": {
+          "embedding_model": "sentence-transformers/distiluse-base-multilingual-cased",
+          "retriever_type": "mmr",
+          "retrieval_config": {
+            "top_k": 5,
+            "chunk_size": 1000,
+            "chunk_overlap": 100
+          }
+        },
+        "metrics": {
+          "cond": {
+            "retrieval": {
+              "hit_rate": 0.64,
+              "mrr": 0.514,
+              "precision": 0.14400000000000002
+            },
+            "generation": {
+              "rouge1": 0.2493333333333333,
+              "rougeL": 0.2493333333333333
+            }
+          },
+          "comp": {
+            "retrieval": {
+              "hit_rate": 0.72,
+              "mrr": 0.4933333333333333,
+              "precision": 0.2
+            },
+            "generation": {
+              "rouge1": 0.23564204559660382,
+              "rougeL": 0.23564204559660382
+            }
+          },
+          "set": {
+            "retrieval": {
+              "hit_rate": 0.5,
+              "mrr": 0.3683333333333333,
+              "precision": 0.12400000000000003
+            },
+            "generation": {
+              "rouge1": 0.21594023633499185,
+              "rougeL": 0.17318924819269935
+            }
+          },
+          "mh": {
+            "retrieval": {
+              "hit_rate": 0.58,
+              "mrr": 0.3846666666666666,
+              "precision": 0.14800000000000002
+            },
+            "generation": {
+              "rouge1": 0.19785714285714284,
+              "rougeL": 0.19785714285714284
+            }
+          },
+          "simple": {
+            "retrieval": {
+              "hit_rate": 0.48,
+              "mrr": 0.33399999999999996,
+              "precision": 0.12
+            },
+            "generation": {
+              "rouge1": 0.16162049062049064,
+              "rougeL": 0.16162049062049064
+            }
+          },
+          "overall": {
+            "retrieval": {
+              "hit_rate": 0.584,
+              "mrr": 0.41886666666666666,
+              "precision": 0.1472
+            },
+            "generation": {
+              "rouge1": 0.21207864974851248,
+              "rougeL": 0.20352845212005397
+            }
+          }
+        },
+        "metadata": {
+          "n_questions": 250,
+          "submit_timestamp": ""
+        }
+      }
     }
   },
-  "last_version": "0.1.1",
+  "last_version": "0.1.2",
   "n_questions": 250,
   "date_title": "28 \u043c\u0430\u0440\u0442\u0430 2025"
 }
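
For anyone consuming this file downstream, here is a minimal sketch of pulling the new 0.1.2 entry back out of results.json. The hunk does not show the top-level key that holds the per-version dict, so the sketch scans top-level values for it rather than hard-coding a key name; everything else (the version, the guid, the metric paths) is taken verbatim from the diff above.

```python
import json

GUID = "73ae38971b0c48a2bedd143db7c25d3b"  # submission guid from this commit

with open("results.json", encoding="utf-8") as f:
    data = json.load(f)

# The enclosing top-level key is outside this hunk, so locate the
# per-version dict by looking for the "0.1.2" entry instead.
versions = next(v for v in data.values() if isinstance(v, dict) and "0.1.2" in v)

entry = versions["0.1.2"][GUID]
overall = entry["metrics"]["overall"]

print(entry["model_name"])               # "Llama 3.3 70B"
print(overall["retrieval"]["hit_rate"])  # 0.584
print(overall["generation"]["rougeL"])   # 0.20352845212005397
```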
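The per-category retrieval scores (hit_rate, mrr, precision) look like the standard top-k metrics averaged over the questions in each category, with k = top_k = 5 from the config block. This commit does not include the evaluation code, so the following is only a sketch of the conventional per-question definitions, not necessarily the benchmark's exact implementation:

```python
from typing import Sequence, Set

def retrieval_metrics(ranked_ids: Sequence[str], relevant_ids: Set[str], k: int = 5) -> dict:
    """Conventional top-k retrieval metrics for one question.

    hit_rate:  1 if any relevant chunk is ranked in the top k, else 0
    mrr:       1 / rank of the first relevant chunk (0 if none in top k)
    precision: fraction of the top k retrieved chunks that are relevant
    """
    top_k = list(ranked_ids)[:k]
    hit_ranks = [rank for rank, doc_id in enumerate(top_k, start=1)
                 if doc_id in relevant_ids]
    return {
        "hit_rate": 1.0 if hit_ranks else 0.0,
        "mrr": 1.0 / hit_ranks[0] if hit_ranks else 0.0,
        "precision": len(hit_ranks) / k,
    }
```

Averaging these per-question dicts over a category would produce numbers of the shape seen above; for instance, per-question precision is a multiple of 0.2 when k = 5, so category averages like 0.144 are consistent with this reading.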