{
  "dataset_revision": "ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba",
  "mteb_dataset_name": "MTOPIntentClassification",
  "mteb_version": "1.1.1",
  "test": {
    "de": {
      "accuracy": 0.33285432516201746,
      "accuracy_stderr": 0.024059899232158014,
      "f1": 0.19841703666811564,
      "f1_stderr": 0.012841274121400391,
      "main_score": 0.33285432516201746
    },
    "en": {
      "accuracy": 0.3401276789785682,
      "accuracy_stderr": 0.029371050339530268,
      "f1": 0.21256775922291288,
      "f1_stderr": 0.011736604738126457,
      "main_score": 0.3401276789785682
    },
    "es": {
      "accuracy": 0.32121414276184124,
      "accuracy_stderr": 0.027404397473574028,
      "f1": 0.1934706868150749,
      "f1_stderr": 0.012035496308061593,
      "main_score": 0.32121414276184124
    },
    "evaluation_time": 11.03,
    "fr": {
      "accuracy": 0.26088318196053867,
      "accuracy_stderr": 0.019005974782572304,
      "f1": 0.1722608011891254,
      "f1_stderr": 0.00802124347675454,
      "main_score": 0.26088318196053867
    },
    "hi": {
      "accuracy": 0.15320903549659376,
      "accuracy_stderr": 0.02031759573423477,
      "f1": 0.0962002916015258,
      "f1_stderr": 0.009863519144177834,
      "main_score": 0.15320903549659376
    },
    "th": {
      "accuracy": 0.16426763110307413,
      "accuracy_stderr": 0.019874265095542725,
      "f1": 0.11023799171137183,
      "f1_stderr": 0.005487299111661436,
      "main_score": 0.16426763110307413
    }
  }
}