pythia-14m-embedding / mean_pooling / MTOPDomainClassification.json
{
  "dataset_revision": "d80d48c1eb48d3562165c59d59d0034df9fff0bf",
  "mteb_dataset_name": "MTOPDomainClassification",
  "mteb_version": "1.1.1",
  "test": {
    "de": {
      "accuracy": 0.5396167934629474,
      "accuracy_stderr": 0.020077120782820437,
      "f1": 0.516707117653683,
      "f1_stderr": 0.018922307862777602,
      "main_score": 0.5396167934629474
    },
    "en": {
      "accuracy": 0.6439808481532147,
      "accuracy_stderr": 0.03105821414916843,
      "f1": 0.6346827081871262,
      "f1_stderr": 0.03254766251828963,
      "main_score": 0.6439808481532147
    },
    "es": {
      "accuracy": 0.5701801200800534,
      "accuracy_stderr": 0.017117095962517787,
      "f1": 0.5423413458037234,
      "f1_stderr": 0.017782921779650813,
      "main_score": 0.5701801200800534
    },
    "evaluation_time": 5.93,
    "fr": {
      "accuracy": 0.4884434700908236,
      "accuracy_stderr": 0.01961709020920114,
      "f1": 0.46484941805279867,
      "f1_stderr": 0.02125430774387838,
      "main_score": 0.4884434700908236
    },
    "hi": {
      "accuracy": 0.397669415561133,
      "accuracy_stderr": 0.023968004685399148,
      "f1": 0.35509743255298776,
      "f1_stderr": 0.024088371187152582,
      "main_score": 0.397669415561133
    },
    "th": {
      "accuracy": 0.4258951175406872,
      "accuracy_stderr": 0.022155079669758066,
      "f1": 0.40472444227858884,
      "f1_stderr": 0.02201752778884968,
      "main_score": 0.4258951175406872
    }
  }
}