pythia-14m-embedding / mean_pooling / AmazonCounterfactualClassification.json
{
  "dataset_revision": "e8379541af4e31359cca9fbcf4b00f2671dba205",
  "mteb_dataset_name": "AmazonCounterfactualClassification",
  "mteb_version": "1.1.1",
  "test": {
    "de": {
      "accuracy": 0.6229122055674517,
      "accuracy_stderr": 0.02689391560397646,
      "ap": 0.7654273024410109,
      "ap_stderr": 0.014834546530994171,
      "f1": 0.6037703210343267,
      "f1_stderr": 0.023236308207408476,
      "main_score": 0.6229122055674517
    },
    "en": {
      "accuracy": 0.7073134328358208,
      "accuracy_stderr": 0.0383582089552239,
      "ap": 0.3235996836729783,
      "ap_stderr": 0.026484778108220294,
      "f1": 0.642137087561157,
      "f1_stderr": 0.03200155942638674,
      "main_score": 0.7073134328358208
    },
    "en-ext": {
      "accuracy": 0.6757871064467766,
      "accuracy_stderr": 0.04988162814232977,
      "ap": 0.1703033311712744,
      "ap_stderr": 0.013926829446773738,
      "f1": 0.5482175063189498,
      "f1_stderr": 0.03163298075732844,
      "main_score": 0.6757871064467766
    },
    "evaluation_time": 10.55,
    "ja": {
      "accuracy": 0.6251605995717344,
      "accuracy_stderr": 0.053002616964599826,
      "ap": 0.14367489440317666,
      "ap_stderr": 0.012527680056177723,
      "f1": 0.5048473578289779,
      "f1_stderr": 0.030994925213475993,
      "main_score": 0.6251605995717344
    }
  }
}