ai-forever committed
Commit 323e9a7 · verified · 1 Parent(s): bce6f4a

Add/update results for Llama 3.3 70B (version 0.1.1, guid 73ae38971b0c48a2bedd143db7c25d3b)

Files changed (1)
  1. results.json +85 -0
results.json CHANGED
@@ -85,6 +85,91 @@
           "n_questions": 250,
           "submit_timestamp": ""
         }
+      },
+      "73ae38971b0c48a2bedd143db7c25d3b": {
+        "model_name": "Llama 3.3 70B",
+        "timestamp": "2025-03-28T10:51:17",
+        "config": {
+          "embedding_model": "sentence-transformers/distiluse-base-multilingual-cased",
+          "retriever_type": "mmr",
+          "retrieval_config": {
+            "top_k": 5,
+            "chunk_size": 1000,
+            "chunk_overlap": 100
+          }
+        },
+        "metrics": {
+          "cond": {
+            "retrieval": {
+              "hit_rate": 0.64,
+              "mrr": 0.514,
+              "precision": 0.14400000000000002
+            },
+            "generation": {
+              "rouge1": 0.2493333333333333,
+              "rougeL": 0.2493333333333333
+            }
+          },
+          "comp": {
+            "retrieval": {
+              "hit_rate": 0.72,
+              "mrr": 0.4933333333333333,
+              "precision": 0.2
+            },
+            "generation": {
+              "rouge1": 0.23564204559660382,
+              "rougeL": 0.23564204559660382
+            }
+          },
+          "set": {
+            "retrieval": {
+              "hit_rate": 0.5,
+              "mrr": 0.3683333333333333,
+              "precision": 0.12400000000000003
+            },
+            "generation": {
+              "rouge1": 0.21594023633499185,
+              "rougeL": 0.17318924819269935
+            }
+          },
+          "mh": {
+            "retrieval": {
+              "hit_rate": 0.58,
+              "mrr": 0.3846666666666666,
+              "precision": 0.14800000000000002
+            },
+            "generation": {
+              "rouge1": 0.19785714285714284,
+              "rougeL": 0.19785714285714284
+            }
+          },
+          "simple": {
+            "retrieval": {
+              "hit_rate": 0.48,
+              "mrr": 0.33399999999999996,
+              "precision": 0.12
+            },
+            "generation": {
+              "rouge1": 0.16162049062049064,
+              "rougeL": 0.16162049062049064
+            }
+          },
+          "overall": {
+            "retrieval": {
+              "hit_rate": 0.584,
+              "mrr": 0.41886666666666666,
+              "precision": 0.1472
+            },
+            "generation": {
+              "rouge1": 0.21207864974851248,
+              "rougeL": 0.20352845212005397
+            }
+          }
+        },
+        "metadata": {
+          "n_questions": 250,
+          "submit_timestamp": ""
+        }
       }
     }
   },
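
For context, the config block above records a standard dense-retrieval setup: the corpus is split into 1000-character chunks with 100 characters of overlap, chunks are embedded with sentence-transformers/distiluse-base-multilingual-cased, and top_k = 5 chunks are selected by maximal marginal relevance ("mmr"). The evaluation harness itself is not part of this commit, so the Python sketch below is only an illustration of such a setup under those assumptions; the function names and the lam trade-off are hypothetical.

import numpy as np
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("sentence-transformers/distiluse-base-multilingual-cased")

def chunk(text, size=1000, overlap=100):
    # Fixed-size character windows matching chunk_size / chunk_overlap above.
    step = size - overlap
    return [text[i:i + size] for i in range(0, max(len(text) - overlap, 1), step)]

def mmr_retrieve(query, chunks, k=5, lam=0.5):
    # Maximal marginal relevance: balance similarity to the query against
    # redundancy with already-selected chunks. lam = 0.5 is an assumption;
    # the benchmark's actual MMR parameters are not recorded in this entry.
    q = model.encode([query], normalize_embeddings=True)[0]
    d = model.encode(chunks, normalize_embeddings=True)
    query_sim = d @ q
    selected, candidates = [], list(range(len(chunks)))
    while candidates and len(selected) < k:
        if selected:
            redundancy = np.max(d[candidates] @ d[selected].T, axis=1)
        else:
            redundancy = np.zeros(len(candidates))
        scores = lam * query_sim[candidates] - (1 - lam) * redundancy
        selected.append(candidates.pop(int(np.argmax(scores))))
    return [chunks[i] for i in selected]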
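
The retrieval figures in the metrics block are per-question scores averaged over the benchmark questions (n_questions is 250, and the five per-type subsets cond, comp, set, mh and simple presumably hold 50 each); long decimals such as 0.14400000000000002 are ordinary floating-point artifacts of that averaging. A minimal sketch of the standard definitions at top_k = 5, assuming each question comes with a set of gold chunk ids (the actual scoring code is not in this commit):

def question_scores(ranked_ids, gold_ids, k=5):
    # hit_rate, mrr and precision@k for a single question.
    # ranked_ids: chunk ids returned by the retriever, best first.
    # gold_ids:   set of relevant chunk ids (this shape is an assumption).
    top = ranked_ids[:k]
    hits = [doc_id in gold_ids for doc_id in top]
    hit_rate = 1.0 if any(hits) else 0.0
    # Reciprocal rank of the first relevant chunk, 0.0 if none retrieved.
    mrr = next((1.0 / (rank + 1) for rank, hit in enumerate(hits) if hit), 0.0)
    precision = sum(hits) / k
    return hit_rate, mrr, precision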
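
The generation figures are ROUGE-1 and ROUGE-L scores between generated and reference answers; the equal rouge1 and rougeL values in several subsets are consistent with short answers, where the longest common subsequence covers the same tokens as the unigram overlap. One way to compute such scores is Google's rouge-score package; whether this benchmark uses that exact implementation is an assumption.

# pip install rouge-score
from rouge_score import rouge_scorer

scorer = rouge_scorer.RougeScorer(["rouge1", "rougeL"], use_stemmer=False)

def rouge_f1(reference: str, prediction: str) -> dict:
    # F-measure for each requested ROUGE variant.
    scores = scorer.score(reference, prediction)
    return {name: s.fmeasure for name, s in scores.items()}

print(rouge_f1("the capital is Paris", "Paris"))  # {'rouge1': 0.4, 'rougeL': 0.4}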