| Column | Type | Min length | Max length |
|---|---|---|---|
| sha | string | 40 | 40 |
| text | string | 1 | 13.4M |
| id | string | 2 | 117 |
| tags | list | 1 | 7.91k |
| created_at | string | 25 | 25 |
| metadata | string | 2 | 875k |
| last_modified | string | 25 | 25 |
| arxiv | list | 0 | 25 |
| languages | list | 0 | 7.91k |
| tags_str | string | 17 | 159k |
| text_str | string | 1 | 447k |
| text_lists | list | 0 | 352 |
| processed_texts | list | 1 | 353 |
| tokens_length | list | 1 | 353 |
| input_texts | list | 1 | 40 |

(For string columns the min/max are character counts; for list columns they are item counts.)
50af6327ef57b32e2cb85a6e68b1f54bb6d91e18
# Dataset Card for "BGL_BERT_Baseline" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
EgilKarlsen/BGL_BERT_Baseline
[ "region:us" ]
2023-08-18T13:20:24+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "0", "dtype": "float32"}, {"name": "1", "dtype": "float32"}, {"name": "2", "dtype": "float32"}, {"name": "3", "dtype": "float32"}, {"name": "4", "dtype": "float32"}, {"name": "5", "dtype": "float32"}, {"name": "6", "dtype": "float32"}, {"name": "7", "dtype": "float32"}, {"name": "8", "dtype": "float32"}, {"name": "9", "dtype": "float32"}, {"name": "10", "dtype": "float32"}, {"name": "11", "dtype": "float32"}, {"name": "12", "dtype": "float32"}, {"name": "13", "dtype": "float32"}, {"name": "14", "dtype": "float32"}, {"name": "15", "dtype": "float32"}, {"name": "16", "dtype": "float32"}, {"name": "17", "dtype": "float32"}, {"name": "18", "dtype": "float32"}, {"name": "19", "dtype": "float32"}, {"name": "20", "dtype": "float32"}, {"name": "21", "dtype": "float32"}, {"name": "22", "dtype": "float32"}, {"name": "23", "dtype": "float32"}, {"name": "24", "dtype": "float32"}, {"name": "25", "dtype": "float32"}, {"name": "26", "dtype": "float32"}, {"name": "27", "dtype": "float32"}, {"name": "28", "dtype": "float32"}, {"name": "29", "dtype": "float32"}, {"name": "30", "dtype": "float32"}, {"name": "31", "dtype": "float32"}, {"name": "32", "dtype": "float32"}, {"name": "33", "dtype": "float32"}, {"name": "34", "dtype": "float32"}, {"name": "35", "dtype": "float32"}, {"name": "36", "dtype": "float32"}, {"name": "37", "dtype": "float32"}, {"name": "38", "dtype": "float32"}, {"name": "39", "dtype": "float32"}, {"name": "40", "dtype": "float32"}, {"name": "41", "dtype": "float32"}, {"name": "42", "dtype": "float32"}, {"name": "43", "dtype": "float32"}, {"name": "44", "dtype": "float32"}, {"name": "45", "dtype": "float32"}, {"name": "46", "dtype": "float32"}, {"name": "47", "dtype": "float32"}, {"name": "48", "dtype": "float32"}, {"name": "49", "dtype": "float32"}, {"name": "50", "dtype": "float32"}, {"name": "51", "dtype": "float32"}, {"name": "52", "dtype": "float32"}, {"name": "53", "dtype": "float32"}, {"name": "54", "dtype": "float32"}, {"name": "55", "dtype": "float32"}, {"name": "56", "dtype": "float32"}, {"name": "57", "dtype": "float32"}, {"name": "58", "dtype": "float32"}, {"name": "59", "dtype": "float32"}, {"name": "60", "dtype": "float32"}, {"name": "61", "dtype": "float32"}, {"name": "62", "dtype": "float32"}, {"name": "63", "dtype": "float32"}, {"name": "64", "dtype": "float32"}, {"name": "65", "dtype": "float32"}, {"name": "66", "dtype": "float32"}, {"name": "67", "dtype": "float32"}, {"name": "68", "dtype": "float32"}, {"name": "69", "dtype": "float32"}, {"name": "70", "dtype": "float32"}, {"name": "71", "dtype": "float32"}, {"name": "72", "dtype": "float32"}, {"name": "73", "dtype": "float32"}, {"name": "74", "dtype": "float32"}, {"name": "75", "dtype": "float32"}, {"name": "76", "dtype": "float32"}, {"name": "77", "dtype": "float32"}, {"name": "78", "dtype": "float32"}, {"name": "79", "dtype": "float32"}, {"name": "80", "dtype": "float32"}, {"name": "81", "dtype": "float32"}, {"name": "82", "dtype": "float32"}, {"name": "83", "dtype": "float32"}, {"name": "84", "dtype": "float32"}, {"name": "85", "dtype": "float32"}, {"name": "86", "dtype": "float32"}, {"name": "87", "dtype": "float32"}, {"name": "88", "dtype": "float32"}, {"name": "89", "dtype": "float32"}, {"name": "90", "dtype": "float32"}, {"name": "91", "dtype": "float32"}, {"name": "92", "dtype": "float32"}, {"name": "93", "dtype": "float32"}, 
{"name": "94", "dtype": "float32"}, {"name": "95", "dtype": "float32"}, {"name": "96", "dtype": "float32"}, {"name": "97", "dtype": "float32"}, {"name": "98", "dtype": "float32"}, {"name": "99", "dtype": "float32"}, {"name": "100", "dtype": "float32"}, {"name": "101", "dtype": "float32"}, {"name": "102", "dtype": "float32"}, {"name": "103", "dtype": "float32"}, {"name": "104", "dtype": "float32"}, {"name": "105", "dtype": "float32"}, {"name": "106", "dtype": "float32"}, {"name": "107", "dtype": "float32"}, {"name": "108", "dtype": "float32"}, {"name": "109", "dtype": "float32"}, {"name": "110", "dtype": "float32"}, {"name": "111", "dtype": "float32"}, {"name": "112", "dtype": "float32"}, {"name": "113", "dtype": "float32"}, {"name": "114", "dtype": "float32"}, {"name": "115", "dtype": "float32"}, {"name": "116", "dtype": "float32"}, {"name": "117", "dtype": "float32"}, {"name": "118", "dtype": "float32"}, {"name": "119", "dtype": "float32"}, {"name": "120", "dtype": "float32"}, {"name": "121", "dtype": "float32"}, {"name": "122", "dtype": "float32"}, {"name": "123", "dtype": "float32"}, {"name": "124", "dtype": "float32"}, {"name": "125", "dtype": "float32"}, {"name": "126", "dtype": "float32"}, {"name": "127", "dtype": "float32"}, {"name": "128", "dtype": "float32"}, {"name": "129", "dtype": "float32"}, {"name": "130", "dtype": "float32"}, {"name": "131", "dtype": "float32"}, {"name": "132", "dtype": "float32"}, {"name": "133", "dtype": "float32"}, {"name": "134", "dtype": "float32"}, {"name": "135", "dtype": "float32"}, {"name": "136", "dtype": "float32"}, {"name": "137", "dtype": "float32"}, {"name": "138", "dtype": "float32"}, {"name": "139", "dtype": "float32"}, {"name": "140", "dtype": "float32"}, {"name": "141", "dtype": "float32"}, {"name": "142", "dtype": "float32"}, {"name": "143", "dtype": "float32"}, {"name": "144", "dtype": "float32"}, {"name": "145", "dtype": "float32"}, {"name": "146", "dtype": "float32"}, {"name": "147", "dtype": "float32"}, {"name": "148", "dtype": "float32"}, {"name": "149", "dtype": "float32"}, {"name": "150", "dtype": "float32"}, {"name": "151", "dtype": "float32"}, {"name": "152", "dtype": "float32"}, {"name": "153", "dtype": "float32"}, {"name": "154", "dtype": "float32"}, {"name": "155", "dtype": "float32"}, {"name": "156", "dtype": "float32"}, {"name": "157", "dtype": "float32"}, {"name": "158", "dtype": "float32"}, {"name": "159", "dtype": "float32"}, {"name": "160", "dtype": "float32"}, {"name": "161", "dtype": "float32"}, {"name": "162", "dtype": "float32"}, {"name": "163", "dtype": "float32"}, {"name": "164", "dtype": "float32"}, {"name": "165", "dtype": "float32"}, {"name": "166", "dtype": "float32"}, {"name": "167", "dtype": "float32"}, {"name": "168", "dtype": "float32"}, {"name": "169", "dtype": "float32"}, {"name": "170", "dtype": "float32"}, {"name": "171", "dtype": "float32"}, {"name": "172", "dtype": "float32"}, {"name": "173", "dtype": "float32"}, {"name": "174", "dtype": "float32"}, {"name": "175", "dtype": "float32"}, {"name": "176", "dtype": "float32"}, {"name": "177", "dtype": "float32"}, {"name": "178", "dtype": "float32"}, {"name": "179", "dtype": "float32"}, {"name": "180", "dtype": "float32"}, {"name": "181", "dtype": "float32"}, {"name": "182", "dtype": "float32"}, {"name": "183", "dtype": "float32"}, {"name": "184", "dtype": "float32"}, {"name": "185", "dtype": "float32"}, {"name": "186", "dtype": "float32"}, {"name": "187", "dtype": "float32"}, {"name": "188", "dtype": "float32"}, {"name": "189", "dtype": "float32"}, {"name": 
"190", "dtype": "float32"}, {"name": "191", "dtype": "float32"}, {"name": "192", "dtype": "float32"}, {"name": "193", "dtype": "float32"}, {"name": "194", "dtype": "float32"}, {"name": "195", "dtype": "float32"}, {"name": "196", "dtype": "float32"}, {"name": "197", "dtype": "float32"}, {"name": "198", "dtype": "float32"}, {"name": "199", "dtype": "float32"}, {"name": "200", "dtype": "float32"}, {"name": "201", "dtype": "float32"}, {"name": "202", "dtype": "float32"}, {"name": "203", "dtype": "float32"}, {"name": "204", "dtype": "float32"}, {"name": "205", "dtype": "float32"}, {"name": "206", "dtype": "float32"}, {"name": "207", "dtype": "float32"}, {"name": "208", "dtype": "float32"}, {"name": "209", "dtype": "float32"}, {"name": "210", "dtype": "float32"}, {"name": "211", "dtype": "float32"}, {"name": "212", "dtype": "float32"}, {"name": "213", "dtype": "float32"}, {"name": "214", "dtype": "float32"}, {"name": "215", "dtype": "float32"}, {"name": "216", "dtype": "float32"}, {"name": "217", "dtype": "float32"}, {"name": "218", "dtype": "float32"}, {"name": "219", "dtype": "float32"}, {"name": "220", "dtype": "float32"}, {"name": "221", "dtype": "float32"}, {"name": "222", "dtype": "float32"}, {"name": "223", "dtype": "float32"}, {"name": "224", "dtype": "float32"}, {"name": "225", "dtype": "float32"}, {"name": "226", "dtype": "float32"}, {"name": "227", "dtype": "float32"}, {"name": "228", "dtype": "float32"}, {"name": "229", "dtype": "float32"}, {"name": "230", "dtype": "float32"}, {"name": "231", "dtype": "float32"}, {"name": "232", "dtype": "float32"}, {"name": "233", "dtype": "float32"}, {"name": "234", "dtype": "float32"}, {"name": "235", "dtype": "float32"}, {"name": "236", "dtype": "float32"}, {"name": "237", "dtype": "float32"}, {"name": "238", "dtype": "float32"}, {"name": "239", "dtype": "float32"}, {"name": "240", "dtype": "float32"}, {"name": "241", "dtype": "float32"}, {"name": "242", "dtype": "float32"}, {"name": "243", "dtype": "float32"}, {"name": "244", "dtype": "float32"}, {"name": "245", "dtype": "float32"}, {"name": "246", "dtype": "float32"}, {"name": "247", "dtype": "float32"}, {"name": "248", "dtype": "float32"}, {"name": "249", "dtype": "float32"}, {"name": "250", "dtype": "float32"}, {"name": "251", "dtype": "float32"}, {"name": "252", "dtype": "float32"}, {"name": "253", "dtype": "float32"}, {"name": "254", "dtype": "float32"}, {"name": "255", "dtype": "float32"}, {"name": "256", "dtype": "float32"}, {"name": "257", "dtype": "float32"}, {"name": "258", "dtype": "float32"}, {"name": "259", "dtype": "float32"}, {"name": "260", "dtype": "float32"}, {"name": "261", "dtype": "float32"}, {"name": "262", "dtype": "float32"}, {"name": "263", "dtype": "float32"}, {"name": "264", "dtype": "float32"}, {"name": "265", "dtype": "float32"}, {"name": "266", "dtype": "float32"}, {"name": "267", "dtype": "float32"}, {"name": "268", "dtype": "float32"}, {"name": "269", "dtype": "float32"}, {"name": "270", "dtype": "float32"}, {"name": "271", "dtype": "float32"}, {"name": "272", "dtype": "float32"}, {"name": "273", "dtype": "float32"}, {"name": "274", "dtype": "float32"}, {"name": "275", "dtype": "float32"}, {"name": "276", "dtype": "float32"}, {"name": "277", "dtype": "float32"}, {"name": "278", "dtype": "float32"}, {"name": "279", "dtype": "float32"}, {"name": "280", "dtype": "float32"}, {"name": "281", "dtype": "float32"}, {"name": "282", "dtype": "float32"}, {"name": "283", "dtype": "float32"}, {"name": "284", "dtype": "float32"}, {"name": "285", "dtype": "float32"}, {"name": 
"286", "dtype": "float32"}, {"name": "287", "dtype": "float32"}, {"name": "288", "dtype": "float32"}, {"name": "289", "dtype": "float32"}, {"name": "290", "dtype": "float32"}, {"name": "291", "dtype": "float32"}, {"name": "292", "dtype": "float32"}, {"name": "293", "dtype": "float32"}, {"name": "294", "dtype": "float32"}, {"name": "295", "dtype": "float32"}, {"name": "296", "dtype": "float32"}, {"name": "297", "dtype": "float32"}, {"name": "298", "dtype": "float32"}, {"name": "299", "dtype": "float32"}, {"name": "300", "dtype": "float32"}, {"name": "301", "dtype": "float32"}, {"name": "302", "dtype": "float32"}, {"name": "303", "dtype": "float32"}, {"name": "304", "dtype": "float32"}, {"name": "305", "dtype": "float32"}, {"name": "306", "dtype": "float32"}, {"name": "307", "dtype": "float32"}, {"name": "308", "dtype": "float32"}, {"name": "309", "dtype": "float32"}, {"name": "310", "dtype": "float32"}, {"name": "311", "dtype": "float32"}, {"name": "312", "dtype": "float32"}, {"name": "313", "dtype": "float32"}, {"name": "314", "dtype": "float32"}, {"name": "315", "dtype": "float32"}, {"name": "316", "dtype": "float32"}, {"name": "317", "dtype": "float32"}, {"name": "318", "dtype": "float32"}, {"name": "319", "dtype": "float32"}, {"name": "320", "dtype": "float32"}, {"name": "321", "dtype": "float32"}, {"name": "322", "dtype": "float32"}, {"name": "323", "dtype": "float32"}, {"name": "324", "dtype": "float32"}, {"name": "325", "dtype": "float32"}, {"name": "326", "dtype": "float32"}, {"name": "327", "dtype": "float32"}, {"name": "328", "dtype": "float32"}, {"name": "329", "dtype": "float32"}, {"name": "330", "dtype": "float32"}, {"name": "331", "dtype": "float32"}, {"name": "332", "dtype": "float32"}, {"name": "333", "dtype": "float32"}, {"name": "334", "dtype": "float32"}, {"name": "335", "dtype": "float32"}, {"name": "336", "dtype": "float32"}, {"name": "337", "dtype": "float32"}, {"name": "338", "dtype": "float32"}, {"name": "339", "dtype": "float32"}, {"name": "340", "dtype": "float32"}, {"name": "341", "dtype": "float32"}, {"name": "342", "dtype": "float32"}, {"name": "343", "dtype": "float32"}, {"name": "344", "dtype": "float32"}, {"name": "345", "dtype": "float32"}, {"name": "346", "dtype": "float32"}, {"name": "347", "dtype": "float32"}, {"name": "348", "dtype": "float32"}, {"name": "349", "dtype": "float32"}, {"name": "350", "dtype": "float32"}, {"name": "351", "dtype": "float32"}, {"name": "352", "dtype": "float32"}, {"name": "353", "dtype": "float32"}, {"name": "354", "dtype": "float32"}, {"name": "355", "dtype": "float32"}, {"name": "356", "dtype": "float32"}, {"name": "357", "dtype": "float32"}, {"name": "358", "dtype": "float32"}, {"name": "359", "dtype": "float32"}, {"name": "360", "dtype": "float32"}, {"name": "361", "dtype": "float32"}, {"name": "362", "dtype": "float32"}, {"name": "363", "dtype": "float32"}, {"name": "364", "dtype": "float32"}, {"name": "365", "dtype": "float32"}, {"name": "366", "dtype": "float32"}, {"name": "367", "dtype": "float32"}, {"name": "368", "dtype": "float32"}, {"name": "369", "dtype": "float32"}, {"name": "370", "dtype": "float32"}, {"name": "371", "dtype": "float32"}, {"name": "372", "dtype": "float32"}, {"name": "373", "dtype": "float32"}, {"name": "374", "dtype": "float32"}, {"name": "375", "dtype": "float32"}, {"name": "376", "dtype": "float32"}, {"name": "377", "dtype": "float32"}, {"name": "378", "dtype": "float32"}, {"name": "379", "dtype": "float32"}, {"name": "380", "dtype": "float32"}, {"name": "381", "dtype": "float32"}, {"name": 
"382", "dtype": "float32"}, {"name": "383", "dtype": "float32"}, {"name": "384", "dtype": "float32"}, {"name": "385", "dtype": "float32"}, {"name": "386", "dtype": "float32"}, {"name": "387", "dtype": "float32"}, {"name": "388", "dtype": "float32"}, {"name": "389", "dtype": "float32"}, {"name": "390", "dtype": "float32"}, {"name": "391", "dtype": "float32"}, {"name": "392", "dtype": "float32"}, {"name": "393", "dtype": "float32"}, {"name": "394", "dtype": "float32"}, {"name": "395", "dtype": "float32"}, {"name": "396", "dtype": "float32"}, {"name": "397", "dtype": "float32"}, {"name": "398", "dtype": "float32"}, {"name": "399", "dtype": "float32"}, {"name": "400", "dtype": "float32"}, {"name": "401", "dtype": "float32"}, {"name": "402", "dtype": "float32"}, {"name": "403", "dtype": "float32"}, {"name": "404", "dtype": "float32"}, {"name": "405", "dtype": "float32"}, {"name": "406", "dtype": "float32"}, {"name": "407", "dtype": "float32"}, {"name": "408", "dtype": "float32"}, {"name": "409", "dtype": "float32"}, {"name": "410", "dtype": "float32"}, {"name": "411", "dtype": "float32"}, {"name": "412", "dtype": "float32"}, {"name": "413", "dtype": "float32"}, {"name": "414", "dtype": "float32"}, {"name": "415", "dtype": "float32"}, {"name": "416", "dtype": "float32"}, {"name": "417", "dtype": "float32"}, {"name": "418", "dtype": "float32"}, {"name": "419", "dtype": "float32"}, {"name": "420", "dtype": "float32"}, {"name": "421", "dtype": "float32"}, {"name": "422", "dtype": "float32"}, {"name": "423", "dtype": "float32"}, {"name": "424", "dtype": "float32"}, {"name": "425", "dtype": "float32"}, {"name": "426", "dtype": "float32"}, {"name": "427", "dtype": "float32"}, {"name": "428", "dtype": "float32"}, {"name": "429", "dtype": "float32"}, {"name": "430", "dtype": "float32"}, {"name": "431", "dtype": "float32"}, {"name": "432", "dtype": "float32"}, {"name": "433", "dtype": "float32"}, {"name": "434", "dtype": "float32"}, {"name": "435", "dtype": "float32"}, {"name": "436", "dtype": "float32"}, {"name": "437", "dtype": "float32"}, {"name": "438", "dtype": "float32"}, {"name": "439", "dtype": "float32"}, {"name": "440", "dtype": "float32"}, {"name": "441", "dtype": "float32"}, {"name": "442", "dtype": "float32"}, {"name": "443", "dtype": "float32"}, {"name": "444", "dtype": "float32"}, {"name": "445", "dtype": "float32"}, {"name": "446", "dtype": "float32"}, {"name": "447", "dtype": "float32"}, {"name": "448", "dtype": "float32"}, {"name": "449", "dtype": "float32"}, {"name": "450", "dtype": "float32"}, {"name": "451", "dtype": "float32"}, {"name": "452", "dtype": "float32"}, {"name": "453", "dtype": "float32"}, {"name": "454", "dtype": "float32"}, {"name": "455", "dtype": "float32"}, {"name": "456", "dtype": "float32"}, {"name": "457", "dtype": "float32"}, {"name": "458", "dtype": "float32"}, {"name": "459", "dtype": "float32"}, {"name": "460", "dtype": "float32"}, {"name": "461", "dtype": "float32"}, {"name": "462", "dtype": "float32"}, {"name": "463", "dtype": "float32"}, {"name": "464", "dtype": "float32"}, {"name": "465", "dtype": "float32"}, {"name": "466", "dtype": "float32"}, {"name": "467", "dtype": "float32"}, {"name": "468", "dtype": "float32"}, {"name": "469", "dtype": "float32"}, {"name": "470", "dtype": "float32"}, {"name": "471", "dtype": "float32"}, {"name": "472", "dtype": "float32"}, {"name": "473", "dtype": "float32"}, {"name": "474", "dtype": "float32"}, {"name": "475", "dtype": "float32"}, {"name": "476", "dtype": "float32"}, {"name": "477", "dtype": "float32"}, {"name": 
"478", "dtype": "float32"}, {"name": "479", "dtype": "float32"}, {"name": "480", "dtype": "float32"}, {"name": "481", "dtype": "float32"}, {"name": "482", "dtype": "float32"}, {"name": "483", "dtype": "float32"}, {"name": "484", "dtype": "float32"}, {"name": "485", "dtype": "float32"}, {"name": "486", "dtype": "float32"}, {"name": "487", "dtype": "float32"}, {"name": "488", "dtype": "float32"}, {"name": "489", "dtype": "float32"}, {"name": "490", "dtype": "float32"}, {"name": "491", "dtype": "float32"}, {"name": "492", "dtype": "float32"}, {"name": "493", "dtype": "float32"}, {"name": "494", "dtype": "float32"}, {"name": "495", "dtype": "float32"}, {"name": "496", "dtype": "float32"}, {"name": "497", "dtype": "float32"}, {"name": "498", "dtype": "float32"}, {"name": "499", "dtype": "float32"}, {"name": "500", "dtype": "float32"}, {"name": "501", "dtype": "float32"}, {"name": "502", "dtype": "float32"}, {"name": "503", "dtype": "float32"}, {"name": "504", "dtype": "float32"}, {"name": "505", "dtype": "float32"}, {"name": "506", "dtype": "float32"}, {"name": "507", "dtype": "float32"}, {"name": "508", "dtype": "float32"}, {"name": "509", "dtype": "float32"}, {"name": "510", "dtype": "float32"}, {"name": "511", "dtype": "float32"}, {"name": "512", "dtype": "float32"}, {"name": "513", "dtype": "float32"}, {"name": "514", "dtype": "float32"}, {"name": "515", "dtype": "float32"}, {"name": "516", "dtype": "float32"}, {"name": "517", "dtype": "float32"}, {"name": "518", "dtype": "float32"}, {"name": "519", "dtype": "float32"}, {"name": "520", "dtype": "float32"}, {"name": "521", "dtype": "float32"}, {"name": "522", "dtype": "float32"}, {"name": "523", "dtype": "float32"}, {"name": "524", "dtype": "float32"}, {"name": "525", "dtype": "float32"}, {"name": "526", "dtype": "float32"}, {"name": "527", "dtype": "float32"}, {"name": "528", "dtype": "float32"}, {"name": "529", "dtype": "float32"}, {"name": "530", "dtype": "float32"}, {"name": "531", "dtype": "float32"}, {"name": "532", "dtype": "float32"}, {"name": "533", "dtype": "float32"}, {"name": "534", "dtype": "float32"}, {"name": "535", "dtype": "float32"}, {"name": "536", "dtype": "float32"}, {"name": "537", "dtype": "float32"}, {"name": "538", "dtype": "float32"}, {"name": "539", "dtype": "float32"}, {"name": "540", "dtype": "float32"}, {"name": "541", "dtype": "float32"}, {"name": "542", "dtype": "float32"}, {"name": "543", "dtype": "float32"}, {"name": "544", "dtype": "float32"}, {"name": "545", "dtype": "float32"}, {"name": "546", "dtype": "float32"}, {"name": "547", "dtype": "float32"}, {"name": "548", "dtype": "float32"}, {"name": "549", "dtype": "float32"}, {"name": "550", "dtype": "float32"}, {"name": "551", "dtype": "float32"}, {"name": "552", "dtype": "float32"}, {"name": "553", "dtype": "float32"}, {"name": "554", "dtype": "float32"}, {"name": "555", "dtype": "float32"}, {"name": "556", "dtype": "float32"}, {"name": "557", "dtype": "float32"}, {"name": "558", "dtype": "float32"}, {"name": "559", "dtype": "float32"}, {"name": "560", "dtype": "float32"}, {"name": "561", "dtype": "float32"}, {"name": "562", "dtype": "float32"}, {"name": "563", "dtype": "float32"}, {"name": "564", "dtype": "float32"}, {"name": "565", "dtype": "float32"}, {"name": "566", "dtype": "float32"}, {"name": "567", "dtype": "float32"}, {"name": "568", "dtype": "float32"}, {"name": "569", "dtype": "float32"}, {"name": "570", "dtype": "float32"}, {"name": "571", "dtype": "float32"}, {"name": "572", "dtype": "float32"}, {"name": "573", "dtype": "float32"}, {"name": 
"574", "dtype": "float32"}, {"name": "575", "dtype": "float32"}, {"name": "576", "dtype": "float32"}, {"name": "577", "dtype": "float32"}, {"name": "578", "dtype": "float32"}, {"name": "579", "dtype": "float32"}, {"name": "580", "dtype": "float32"}, {"name": "581", "dtype": "float32"}, {"name": "582", "dtype": "float32"}, {"name": "583", "dtype": "float32"}, {"name": "584", "dtype": "float32"}, {"name": "585", "dtype": "float32"}, {"name": "586", "dtype": "float32"}, {"name": "587", "dtype": "float32"}, {"name": "588", "dtype": "float32"}, {"name": "589", "dtype": "float32"}, {"name": "590", "dtype": "float32"}, {"name": "591", "dtype": "float32"}, {"name": "592", "dtype": "float32"}, {"name": "593", "dtype": "float32"}, {"name": "594", "dtype": "float32"}, {"name": "595", "dtype": "float32"}, {"name": "596", "dtype": "float32"}, {"name": "597", "dtype": "float32"}, {"name": "598", "dtype": "float32"}, {"name": "599", "dtype": "float32"}, {"name": "600", "dtype": "float32"}, {"name": "601", "dtype": "float32"}, {"name": "602", "dtype": "float32"}, {"name": "603", "dtype": "float32"}, {"name": "604", "dtype": "float32"}, {"name": "605", "dtype": "float32"}, {"name": "606", "dtype": "float32"}, {"name": "607", "dtype": "float32"}, {"name": "608", "dtype": "float32"}, {"name": "609", "dtype": "float32"}, {"name": "610", "dtype": "float32"}, {"name": "611", "dtype": "float32"}, {"name": "612", "dtype": "float32"}, {"name": "613", "dtype": "float32"}, {"name": "614", "dtype": "float32"}, {"name": "615", "dtype": "float32"}, {"name": "616", "dtype": "float32"}, {"name": "617", "dtype": "float32"}, {"name": "618", "dtype": "float32"}, {"name": "619", "dtype": "float32"}, {"name": "620", "dtype": "float32"}, {"name": "621", "dtype": "float32"}, {"name": "622", "dtype": "float32"}, {"name": "623", "dtype": "float32"}, {"name": "624", "dtype": "float32"}, {"name": "625", "dtype": "float32"}, {"name": "626", "dtype": "float32"}, {"name": "627", "dtype": "float32"}, {"name": "628", "dtype": "float32"}, {"name": "629", "dtype": "float32"}, {"name": "630", "dtype": "float32"}, {"name": "631", "dtype": "float32"}, {"name": "632", "dtype": "float32"}, {"name": "633", "dtype": "float32"}, {"name": "634", "dtype": "float32"}, {"name": "635", "dtype": "float32"}, {"name": "636", "dtype": "float32"}, {"name": "637", "dtype": "float32"}, {"name": "638", "dtype": "float32"}, {"name": "639", "dtype": "float32"}, {"name": "640", "dtype": "float32"}, {"name": "641", "dtype": "float32"}, {"name": "642", "dtype": "float32"}, {"name": "643", "dtype": "float32"}, {"name": "644", "dtype": "float32"}, {"name": "645", "dtype": "float32"}, {"name": "646", "dtype": "float32"}, {"name": "647", "dtype": "float32"}, {"name": "648", "dtype": "float32"}, {"name": "649", "dtype": "float32"}, {"name": "650", "dtype": "float32"}, {"name": "651", "dtype": "float32"}, {"name": "652", "dtype": "float32"}, {"name": "653", "dtype": "float32"}, {"name": "654", "dtype": "float32"}, {"name": "655", "dtype": "float32"}, {"name": "656", "dtype": "float32"}, {"name": "657", "dtype": "float32"}, {"name": "658", "dtype": "float32"}, {"name": "659", "dtype": "float32"}, {"name": "660", "dtype": "float32"}, {"name": "661", "dtype": "float32"}, {"name": "662", "dtype": "float32"}, {"name": "663", "dtype": "float32"}, {"name": "664", "dtype": "float32"}, {"name": "665", "dtype": "float32"}, {"name": "666", "dtype": "float32"}, {"name": "667", "dtype": "float32"}, {"name": "668", "dtype": "float32"}, {"name": "669", "dtype": "float32"}, {"name": 
"670", "dtype": "float32"}, {"name": "671", "dtype": "float32"}, {"name": "672", "dtype": "float32"}, {"name": "673", "dtype": "float32"}, {"name": "674", "dtype": "float32"}, {"name": "675", "dtype": "float32"}, {"name": "676", "dtype": "float32"}, {"name": "677", "dtype": "float32"}, {"name": "678", "dtype": "float32"}, {"name": "679", "dtype": "float32"}, {"name": "680", "dtype": "float32"}, {"name": "681", "dtype": "float32"}, {"name": "682", "dtype": "float32"}, {"name": "683", "dtype": "float32"}, {"name": "684", "dtype": "float32"}, {"name": "685", "dtype": "float32"}, {"name": "686", "dtype": "float32"}, {"name": "687", "dtype": "float32"}, {"name": "688", "dtype": "float32"}, {"name": "689", "dtype": "float32"}, {"name": "690", "dtype": "float32"}, {"name": "691", "dtype": "float32"}, {"name": "692", "dtype": "float32"}, {"name": "693", "dtype": "float32"}, {"name": "694", "dtype": "float32"}, {"name": "695", "dtype": "float32"}, {"name": "696", "dtype": "float32"}, {"name": "697", "dtype": "float32"}, {"name": "698", "dtype": "float32"}, {"name": "699", "dtype": "float32"}, {"name": "700", "dtype": "float32"}, {"name": "701", "dtype": "float32"}, {"name": "702", "dtype": "float32"}, {"name": "703", "dtype": "float32"}, {"name": "704", "dtype": "float32"}, {"name": "705", "dtype": "float32"}, {"name": "706", "dtype": "float32"}, {"name": "707", "dtype": "float32"}, {"name": "708", "dtype": "float32"}, {"name": "709", "dtype": "float32"}, {"name": "710", "dtype": "float32"}, {"name": "711", "dtype": "float32"}, {"name": "712", "dtype": "float32"}, {"name": "713", "dtype": "float32"}, {"name": "714", "dtype": "float32"}, {"name": "715", "dtype": "float32"}, {"name": "716", "dtype": "float32"}, {"name": "717", "dtype": "float32"}, {"name": "718", "dtype": "float32"}, {"name": "719", "dtype": "float32"}, {"name": "720", "dtype": "float32"}, {"name": "721", "dtype": "float32"}, {"name": "722", "dtype": "float32"}, {"name": "723", "dtype": "float32"}, {"name": "724", "dtype": "float32"}, {"name": "725", "dtype": "float32"}, {"name": "726", "dtype": "float32"}, {"name": "727", "dtype": "float32"}, {"name": "728", "dtype": "float32"}, {"name": "729", "dtype": "float32"}, {"name": "730", "dtype": "float32"}, {"name": "731", "dtype": "float32"}, {"name": "732", "dtype": "float32"}, {"name": "733", "dtype": "float32"}, {"name": "734", "dtype": "float32"}, {"name": "735", "dtype": "float32"}, {"name": "736", "dtype": "float32"}, {"name": "737", "dtype": "float32"}, {"name": "738", "dtype": "float32"}, {"name": "739", "dtype": "float32"}, {"name": "740", "dtype": "float32"}, {"name": "741", "dtype": "float32"}, {"name": "742", "dtype": "float32"}, {"name": "743", "dtype": "float32"}, {"name": "744", "dtype": "float32"}, {"name": "745", "dtype": "float32"}, {"name": "746", "dtype": "float32"}, {"name": "747", "dtype": "float32"}, {"name": "748", "dtype": "float32"}, {"name": "749", "dtype": "float32"}, {"name": "750", "dtype": "float32"}, {"name": "751", "dtype": "float32"}, {"name": "752", "dtype": "float32"}, {"name": "753", "dtype": "float32"}, {"name": "754", "dtype": "float32"}, {"name": "755", "dtype": "float32"}, {"name": "756", "dtype": "float32"}, {"name": "757", "dtype": "float32"}, {"name": "758", "dtype": "float32"}, {"name": "759", "dtype": "float32"}, {"name": "760", "dtype": "float32"}, {"name": "761", "dtype": "float32"}, {"name": "762", "dtype": "float32"}, {"name": "763", "dtype": "float32"}, {"name": "764", "dtype": "float32"}, {"name": "765", "dtype": "float32"}, {"name": 
"766", "dtype": "float32"}, {"name": "767", "dtype": "float32"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 115582709.0625, "num_examples": 37500}, {"name": "test", "num_bytes": 38527570.0, "num_examples": 12500}], "download_size": 211882766, "dataset_size": 154110279.0625}}
2023-08-18T14:05:51+00:00
[]
[]
TAGS #region-us
# Dataset Card for "BGL_BERT_Baseline" More Information needed
[ "# Dataset Card for \"BGL_BERT_Baseline\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"BGL_BERT_Baseline\"\n\nMore Information needed" ]
[ 6, 18 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"BGL_BERT_Baseline\"\n\nMore Information needed" ]
3ff7ac18e5df23d30330b992836509839699a688
# Central de Fatos

## Dataset Description

- **Homepage:**
- **Repository:** [https://zenodo.org/record/5191798](https://zenodo.org/record/5191798)
- **Paper:** [https://sol.sbc.org.br/index.php/dsw/article/view/17421/17257](https://sol.sbc.org.br/index.php/dsw/article/view/17421/17257)
- **Leaderboard:**
- **Point of Contact:**

### Dataset Summary

In recent times, interest in research dissecting the dissemination and prevention of misinformation in the online environment has spiked dramatically. Given that scenario, a recurring obstacle is the unavailability of public datasets containing fact-checked instances. In this work, we performed an extensive data collection of such instances from most of the major internationally recognized Brazilian fact-checking agencies. Particularly, this paper offers the research community a novel dataset containing fact-checks from various trustworthy sources regarding a wide range of topics. In total, the resulting collection encompasses 11647 fact-check instances collected across 6 different agencies that can be used for several studies in the contexts of identifying and combating misinformation on digital platforms in Brazil.

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

The dataset is in Portuguese.

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

If you use "Central de Fatos", please cite:

```bibtex
@inproceedings{dsw,
  author = {João Couto and Breno Pimenta and Igor M. de Araújo and Samuel Assis and Julio C. S. Reis and Ana Paula da Silva and Jussara Almeida and Fabrício Benevenuto},
  title = {Central de Fatos: Um Repositório de Checagens de Fatos},
  booktitle = {Anais do III Dataset Showcase Workshop},
  location = {Rio de Janeiro},
  year = {2021},
  keywords = {},
  issn = {0000-0000},
  pages = {128--137},
  publisher = {SBC},
  address = {Porto Alegre, RS, Brasil},
  doi = {10.5753/dsw.2021.17421},
  url = {https://sol.sbc.org.br/index.php/dsw/article/view/17421}
}
```

### Contributions

Thanks to [@ju-resplande](https://github.com/ju-resplande) for adding this dataset.
fake-news-UFG/central_de_fatos
[ "task_categories:text-classification", "language_creators:found", "multilinguality:monolingual", "size_categories:10K<n<100K", "language:pt", "license:cc-by-4.0", "region:us" ]
2023-08-18T13:25:35+00:00
{"language_creators": ["found"], "language": ["pt"], "license": "cc-by-4.0", "multilinguality": ["monolingual"], "size_categories": ["10K<n<100K"], "task_categories": ["text-classification"], "pretty_name": "Central de Fatos", "language_details": "pt-BR", "DOI": "10.5281/zenodo.5191798"}
2023-08-18T20:04:07+00:00
[]
[ "pt" ]
TAGS #task_categories-text-classification #language_creators-found #multilinguality-monolingual #size_categories-10K<n<100K #language-Portuguese #license-cc-by-4.0 #region-us
# Central de Fatos ## Dataset Description - Homepage: - Repository: URL - Paper: URL - Leaderboard: - Point of Contact: ### Dataset Summary In recent times, interest in research dissecting the dissemination and prevention of misinformation in the online environment has spiked dramatically. Given that scenario, a recurring obstacle is the unavailability of public datasets containing fact-checked instances. In this work, we performed an extensive data collection of such instances from most of the major internationally recognized Brazilian fact-checking agencies. Particularly, this paper offers the research community a novel dataset containing fact-checks from various trustworthy sources regarding a wide range of topics. In total, the resulting collection encompasses 11647 fact-check instances collected across 6 different agencies that can be used for several studies in the contexts of identifying and combating misinformation on digital platforms in Brazil. ### Supported Tasks and Leaderboards ### Languages The dataset is in Portuguese. ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information If you use "Central de Fatos", please cite: ### Contributions Thanks to @ju-resplande for adding this dataset.
[ "# Central de Fatos", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: URL\n- Leaderboard: \n- Point of Contact:", "### Dataset Summary\n\nIn recent times, the interest for research dissecting the dissemination and prevention of misinformation in the online environment has spiked dramatically.\nGiven that scenario, a recurring obstacle is the unavailability of public datasets containing fact-checked instances.\n\nIn this work, we performed an extensive data collection of such instances from the better part of all major internationally recognized Brazilian fact-checking agencies.\nParticularly, this paper offers the research community a novel dataset containing fact-checks from various trustworthy sources regarding a wide range of topics.\nIn total, the resulting collection encompasses 11647 fact-check instances collected across 6 different agencies that can be used for several studies in the contexts of identifying and combating misinformation on digital platforms in Brazil.", "### Supported Tasks and Leaderboards", "### Languages\n\nThe dataset is in Portuguese.", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information\n\n\n\n\n\nIf you use \"Central de Fatos\", please cite:", "### Contributions\n\nThanks to @ju-resplande for adding this dataset." ]
[ "TAGS\n#task_categories-text-classification #language_creators-found #multilinguality-monolingual #size_categories-10K<n<100K #language-Portuguese #license-cc-by-4.0 #region-us \n", "# Central de Fatos", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: URL\n- Leaderboard: \n- Point of Contact:", "### Dataset Summary\n\nIn recent times, the interest for research dissecting the dissemination and prevention of misinformation in the online environment has spiked dramatically.\nGiven that scenario, a recurring obstacle is the unavailability of public datasets containing fact-checked instances.\n\nIn this work, we performed an extensive data collection of such instances from the better part of all major internationally recognized Brazilian fact-checking agencies.\nParticularly, this paper offers the research community a novel dataset containing fact-checks from various trustworthy sources regarding a wide range of topics.\nIn total, the resulting collection encompasses 11647 fact-check instances collected across 6 different agencies that can be used for several studies in the contexts of identifying and combating misinformation on digital platforms in Brazil.", "### Supported Tasks and Leaderboards", "### Languages\n\nThe dataset is in Portuguese.", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information\n\n\n\n\n\nIf you use \"Central de Fatos\", please cite:", "### Contributions\n\nThanks to @ju-resplande for adding this dataset." ]
[ 60, 5, 26, 187, 10, 13, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 19, 19 ]
[ "passage: TAGS\n#task_categories-text-classification #language_creators-found #multilinguality-monolingual #size_categories-10K<n<100K #language-Portuguese #license-cc-by-4.0 #region-us \n# Central de Fatos## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: URL\n- Leaderboard: \n- Point of Contact:### Dataset Summary\n\nIn recent times, the interest for research dissecting the dissemination and prevention of misinformation in the online environment has spiked dramatically.\nGiven that scenario, a recurring obstacle is the unavailability of public datasets containing fact-checked instances.\n\nIn this work, we performed an extensive data collection of such instances from the better part of all major internationally recognized Brazilian fact-checking agencies.\nParticularly, this paper offers the research community a novel dataset containing fact-checks from various trustworthy sources regarding a wide range of topics.\nIn total, the resulting collection encompasses 11647 fact-check instances collected across 6 different agencies that can be used for several studies in the contexts of identifying and combating misinformation on digital platforms in Brazil.### Supported Tasks and Leaderboards### Languages\n\nThe dataset is in Portuguese.## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information\n\n\n\n\n\nIf you use \"Central de Fatos\", please cite:### Contributions\n\nThanks to @ju-resplande for adding this dataset." ]
dd15713608de46fc7078497658b33fd1cd494be4
# Dataset Card for "directv-zocalos-agosto-5fps" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Seenka/directv-zocalos-agosto-5fps
[ "region:us" ]
2023-08-18T13:29:24+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "frame_time", "dtype": "time64[us]"}, {"name": "video_storage_path", "dtype": "string"}, {"name": "zocalo_id", "dtype": "string"}, {"name": "frame_number", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 185441076.0, "num_examples": 590}], "download_size": 168694210, "dataset_size": 185441076.0}}
2023-08-18T13:31:14+00:00
[]
[]
TAGS #region-us
# Dataset Card for "directv-zocalos-agosto-5fps" More Information needed
[ "# Dataset Card for \"directv-zocalos-agosto-5fps\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"directv-zocalos-agosto-5fps\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"directv-zocalos-agosto-5fps\"\n\nMore Information needed" ]
7c02f619436c23b93a2b4b224493f36039a743d4
# Dataset Card for "BGL_RoBERTa_Baseline" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
EgilKarlsen/BGL_RoBERTa_Baseline
[ "region:us" ]
2023-08-18T13:32:56+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "0", "dtype": "float32"}, {"name": "1", "dtype": "float32"}, {"name": "2", "dtype": "float32"}, {"name": "3", "dtype": "float32"}, {"name": "4", "dtype": "float32"}, {"name": "5", "dtype": "float32"}, {"name": "6", "dtype": "float32"}, {"name": "7", "dtype": "float32"}, {"name": "8", "dtype": "float32"}, {"name": "9", "dtype": "float32"}, {"name": "10", "dtype": "float32"}, {"name": "11", "dtype": "float32"}, {"name": "12", "dtype": "float32"}, {"name": "13", "dtype": "float32"}, {"name": "14", "dtype": "float32"}, {"name": "15", "dtype": "float32"}, {"name": "16", "dtype": "float32"}, {"name": "17", "dtype": "float32"}, {"name": "18", "dtype": "float32"}, {"name": "19", "dtype": "float32"}, {"name": "20", "dtype": "float32"}, {"name": "21", "dtype": "float32"}, {"name": "22", "dtype": "float32"}, {"name": "23", "dtype": "float32"}, {"name": "24", "dtype": "float32"}, {"name": "25", "dtype": "float32"}, {"name": "26", "dtype": "float32"}, {"name": "27", "dtype": "float32"}, {"name": "28", "dtype": "float32"}, {"name": "29", "dtype": "float32"}, {"name": "30", "dtype": "float32"}, {"name": "31", "dtype": "float32"}, {"name": "32", "dtype": "float32"}, {"name": "33", "dtype": "float32"}, {"name": "34", "dtype": "float32"}, {"name": "35", "dtype": "float32"}, {"name": "36", "dtype": "float32"}, {"name": "37", "dtype": "float32"}, {"name": "38", "dtype": "float32"}, {"name": "39", "dtype": "float32"}, {"name": "40", "dtype": "float32"}, {"name": "41", "dtype": "float32"}, {"name": "42", "dtype": "float32"}, {"name": "43", "dtype": "float32"}, {"name": "44", "dtype": "float32"}, {"name": "45", "dtype": "float32"}, {"name": "46", "dtype": "float32"}, {"name": "47", "dtype": "float32"}, {"name": "48", "dtype": "float32"}, {"name": "49", "dtype": "float32"}, {"name": "50", "dtype": "float32"}, {"name": "51", "dtype": "float32"}, {"name": "52", "dtype": "float32"}, {"name": "53", "dtype": "float32"}, {"name": "54", "dtype": "float32"}, {"name": "55", "dtype": "float32"}, {"name": "56", "dtype": "float32"}, {"name": "57", "dtype": "float32"}, {"name": "58", "dtype": "float32"}, {"name": "59", "dtype": "float32"}, {"name": "60", "dtype": "float32"}, {"name": "61", "dtype": "float32"}, {"name": "62", "dtype": "float32"}, {"name": "63", "dtype": "float32"}, {"name": "64", "dtype": "float32"}, {"name": "65", "dtype": "float32"}, {"name": "66", "dtype": "float32"}, {"name": "67", "dtype": "float32"}, {"name": "68", "dtype": "float32"}, {"name": "69", "dtype": "float32"}, {"name": "70", "dtype": "float32"}, {"name": "71", "dtype": "float32"}, {"name": "72", "dtype": "float32"}, {"name": "73", "dtype": "float32"}, {"name": "74", "dtype": "float32"}, {"name": "75", "dtype": "float32"}, {"name": "76", "dtype": "float32"}, {"name": "77", "dtype": "float32"}, {"name": "78", "dtype": "float32"}, {"name": "79", "dtype": "float32"}, {"name": "80", "dtype": "float32"}, {"name": "81", "dtype": "float32"}, {"name": "82", "dtype": "float32"}, {"name": "83", "dtype": "float32"}, {"name": "84", "dtype": "float32"}, {"name": "85", "dtype": "float32"}, {"name": "86", "dtype": "float32"}, {"name": "87", "dtype": "float32"}, {"name": "88", "dtype": "float32"}, {"name": "89", "dtype": "float32"}, {"name": "90", "dtype": "float32"}, {"name": "91", "dtype": "float32"}, {"name": "92", "dtype": "float32"}, {"name": "93", "dtype": "float32"}, 
{"name": "94", "dtype": "float32"}, {"name": "95", "dtype": "float32"}, {"name": "96", "dtype": "float32"}, {"name": "97", "dtype": "float32"}, {"name": "98", "dtype": "float32"}, {"name": "99", "dtype": "float32"}, {"name": "100", "dtype": "float32"}, {"name": "101", "dtype": "float32"}, {"name": "102", "dtype": "float32"}, {"name": "103", "dtype": "float32"}, {"name": "104", "dtype": "float32"}, {"name": "105", "dtype": "float32"}, {"name": "106", "dtype": "float32"}, {"name": "107", "dtype": "float32"}, {"name": "108", "dtype": "float32"}, {"name": "109", "dtype": "float32"}, {"name": "110", "dtype": "float32"}, {"name": "111", "dtype": "float32"}, {"name": "112", "dtype": "float32"}, {"name": "113", "dtype": "float32"}, {"name": "114", "dtype": "float32"}, {"name": "115", "dtype": "float32"}, {"name": "116", "dtype": "float32"}, {"name": "117", "dtype": "float32"}, {"name": "118", "dtype": "float32"}, {"name": "119", "dtype": "float32"}, {"name": "120", "dtype": "float32"}, {"name": "121", "dtype": "float32"}, {"name": "122", "dtype": "float32"}, {"name": "123", "dtype": "float32"}, {"name": "124", "dtype": "float32"}, {"name": "125", "dtype": "float32"}, {"name": "126", "dtype": "float32"}, {"name": "127", "dtype": "float32"}, {"name": "128", "dtype": "float32"}, {"name": "129", "dtype": "float32"}, {"name": "130", "dtype": "float32"}, {"name": "131", "dtype": "float32"}, {"name": "132", "dtype": "float32"}, {"name": "133", "dtype": "float32"}, {"name": "134", "dtype": "float32"}, {"name": "135", "dtype": "float32"}, {"name": "136", "dtype": "float32"}, {"name": "137", "dtype": "float32"}, {"name": "138", "dtype": "float32"}, {"name": "139", "dtype": "float32"}, {"name": "140", "dtype": "float32"}, {"name": "141", "dtype": "float32"}, {"name": "142", "dtype": "float32"}, {"name": "143", "dtype": "float32"}, {"name": "144", "dtype": "float32"}, {"name": "145", "dtype": "float32"}, {"name": "146", "dtype": "float32"}, {"name": "147", "dtype": "float32"}, {"name": "148", "dtype": "float32"}, {"name": "149", "dtype": "float32"}, {"name": "150", "dtype": "float32"}, {"name": "151", "dtype": "float32"}, {"name": "152", "dtype": "float32"}, {"name": "153", "dtype": "float32"}, {"name": "154", "dtype": "float32"}, {"name": "155", "dtype": "float32"}, {"name": "156", "dtype": "float32"}, {"name": "157", "dtype": "float32"}, {"name": "158", "dtype": "float32"}, {"name": "159", "dtype": "float32"}, {"name": "160", "dtype": "float32"}, {"name": "161", "dtype": "float32"}, {"name": "162", "dtype": "float32"}, {"name": "163", "dtype": "float32"}, {"name": "164", "dtype": "float32"}, {"name": "165", "dtype": "float32"}, {"name": "166", "dtype": "float32"}, {"name": "167", "dtype": "float32"}, {"name": "168", "dtype": "float32"}, {"name": "169", "dtype": "float32"}, {"name": "170", "dtype": "float32"}, {"name": "171", "dtype": "float32"}, {"name": "172", "dtype": "float32"}, {"name": "173", "dtype": "float32"}, {"name": "174", "dtype": "float32"}, {"name": "175", "dtype": "float32"}, {"name": "176", "dtype": "float32"}, {"name": "177", "dtype": "float32"}, {"name": "178", "dtype": "float32"}, {"name": "179", "dtype": "float32"}, {"name": "180", "dtype": "float32"}, {"name": "181", "dtype": "float32"}, {"name": "182", "dtype": "float32"}, {"name": "183", "dtype": "float32"}, {"name": "184", "dtype": "float32"}, {"name": "185", "dtype": "float32"}, {"name": "186", "dtype": "float32"}, {"name": "187", "dtype": "float32"}, {"name": "188", "dtype": "float32"}, {"name": "189", "dtype": "float32"}, {"name": 
"190", "dtype": "float32"}, {"name": "191", "dtype": "float32"}, {"name": "192", "dtype": "float32"}, {"name": "193", "dtype": "float32"}, {"name": "194", "dtype": "float32"}, {"name": "195", "dtype": "float32"}, {"name": "196", "dtype": "float32"}, {"name": "197", "dtype": "float32"}, {"name": "198", "dtype": "float32"}, {"name": "199", "dtype": "float32"}, {"name": "200", "dtype": "float32"}, {"name": "201", "dtype": "float32"}, {"name": "202", "dtype": "float32"}, {"name": "203", "dtype": "float32"}, {"name": "204", "dtype": "float32"}, {"name": "205", "dtype": "float32"}, {"name": "206", "dtype": "float32"}, {"name": "207", "dtype": "float32"}, {"name": "208", "dtype": "float32"}, {"name": "209", "dtype": "float32"}, {"name": "210", "dtype": "float32"}, {"name": "211", "dtype": "float32"}, {"name": "212", "dtype": "float32"}, {"name": "213", "dtype": "float32"}, {"name": "214", "dtype": "float32"}, {"name": "215", "dtype": "float32"}, {"name": "216", "dtype": "float32"}, {"name": "217", "dtype": "float32"}, {"name": "218", "dtype": "float32"}, {"name": "219", "dtype": "float32"}, {"name": "220", "dtype": "float32"}, {"name": "221", "dtype": "float32"}, {"name": "222", "dtype": "float32"}, {"name": "223", "dtype": "float32"}, {"name": "224", "dtype": "float32"}, {"name": "225", "dtype": "float32"}, {"name": "226", "dtype": "float32"}, {"name": "227", "dtype": "float32"}, {"name": "228", "dtype": "float32"}, {"name": "229", "dtype": "float32"}, {"name": "230", "dtype": "float32"}, {"name": "231", "dtype": "float32"}, {"name": "232", "dtype": "float32"}, {"name": "233", "dtype": "float32"}, {"name": "234", "dtype": "float32"}, {"name": "235", "dtype": "float32"}, {"name": "236", "dtype": "float32"}, {"name": "237", "dtype": "float32"}, {"name": "238", "dtype": "float32"}, {"name": "239", "dtype": "float32"}, {"name": "240", "dtype": "float32"}, {"name": "241", "dtype": "float32"}, {"name": "242", "dtype": "float32"}, {"name": "243", "dtype": "float32"}, {"name": "244", "dtype": "float32"}, {"name": "245", "dtype": "float32"}, {"name": "246", "dtype": "float32"}, {"name": "247", "dtype": "float32"}, {"name": "248", "dtype": "float32"}, {"name": "249", "dtype": "float32"}, {"name": "250", "dtype": "float32"}, {"name": "251", "dtype": "float32"}, {"name": "252", "dtype": "float32"}, {"name": "253", "dtype": "float32"}, {"name": "254", "dtype": "float32"}, {"name": "255", "dtype": "float32"}, {"name": "256", "dtype": "float32"}, {"name": "257", "dtype": "float32"}, {"name": "258", "dtype": "float32"}, {"name": "259", "dtype": "float32"}, {"name": "260", "dtype": "float32"}, {"name": "261", "dtype": "float32"}, {"name": "262", "dtype": "float32"}, {"name": "263", "dtype": "float32"}, {"name": "264", "dtype": "float32"}, {"name": "265", "dtype": "float32"}, {"name": "266", "dtype": "float32"}, {"name": "267", "dtype": "float32"}, {"name": "268", "dtype": "float32"}, {"name": "269", "dtype": "float32"}, {"name": "270", "dtype": "float32"}, {"name": "271", "dtype": "float32"}, {"name": "272", "dtype": "float32"}, {"name": "273", "dtype": "float32"}, {"name": "274", "dtype": "float32"}, {"name": "275", "dtype": "float32"}, {"name": "276", "dtype": "float32"}, {"name": "277", "dtype": "float32"}, {"name": "278", "dtype": "float32"}, {"name": "279", "dtype": "float32"}, {"name": "280", "dtype": "float32"}, {"name": "281", "dtype": "float32"}, {"name": "282", "dtype": "float32"}, {"name": "283", "dtype": "float32"}, {"name": "284", "dtype": "float32"}, {"name": "285", "dtype": "float32"}, {"name": 
"286", "dtype": "float32"}, {"name": "287", "dtype": "float32"}, {"name": "288", "dtype": "float32"}, {"name": "289", "dtype": "float32"}, {"name": "290", "dtype": "float32"}, {"name": "291", "dtype": "float32"}, {"name": "292", "dtype": "float32"}, {"name": "293", "dtype": "float32"}, {"name": "294", "dtype": "float32"}, {"name": "295", "dtype": "float32"}, {"name": "296", "dtype": "float32"}, {"name": "297", "dtype": "float32"}, {"name": "298", "dtype": "float32"}, {"name": "299", "dtype": "float32"}, {"name": "300", "dtype": "float32"}, {"name": "301", "dtype": "float32"}, {"name": "302", "dtype": "float32"}, {"name": "303", "dtype": "float32"}, {"name": "304", "dtype": "float32"}, {"name": "305", "dtype": "float32"}, {"name": "306", "dtype": "float32"}, {"name": "307", "dtype": "float32"}, {"name": "308", "dtype": "float32"}, {"name": "309", "dtype": "float32"}, {"name": "310", "dtype": "float32"}, {"name": "311", "dtype": "float32"}, {"name": "312", "dtype": "float32"}, {"name": "313", "dtype": "float32"}, {"name": "314", "dtype": "float32"}, {"name": "315", "dtype": "float32"}, {"name": "316", "dtype": "float32"}, {"name": "317", "dtype": "float32"}, {"name": "318", "dtype": "float32"}, {"name": "319", "dtype": "float32"}, {"name": "320", "dtype": "float32"}, {"name": "321", "dtype": "float32"}, {"name": "322", "dtype": "float32"}, {"name": "323", "dtype": "float32"}, {"name": "324", "dtype": "float32"}, {"name": "325", "dtype": "float32"}, {"name": "326", "dtype": "float32"}, {"name": "327", "dtype": "float32"}, {"name": "328", "dtype": "float32"}, {"name": "329", "dtype": "float32"}, {"name": "330", "dtype": "float32"}, {"name": "331", "dtype": "float32"}, {"name": "332", "dtype": "float32"}, {"name": "333", "dtype": "float32"}, {"name": "334", "dtype": "float32"}, {"name": "335", "dtype": "float32"}, {"name": "336", "dtype": "float32"}, {"name": "337", "dtype": "float32"}, {"name": "338", "dtype": "float32"}, {"name": "339", "dtype": "float32"}, {"name": "340", "dtype": "float32"}, {"name": "341", "dtype": "float32"}, {"name": "342", "dtype": "float32"}, {"name": "343", "dtype": "float32"}, {"name": "344", "dtype": "float32"}, {"name": "345", "dtype": "float32"}, {"name": "346", "dtype": "float32"}, {"name": "347", "dtype": "float32"}, {"name": "348", "dtype": "float32"}, {"name": "349", "dtype": "float32"}, {"name": "350", "dtype": "float32"}, {"name": "351", "dtype": "float32"}, {"name": "352", "dtype": "float32"}, {"name": "353", "dtype": "float32"}, {"name": "354", "dtype": "float32"}, {"name": "355", "dtype": "float32"}, {"name": "356", "dtype": "float32"}, {"name": "357", "dtype": "float32"}, {"name": "358", "dtype": "float32"}, {"name": "359", "dtype": "float32"}, {"name": "360", "dtype": "float32"}, {"name": "361", "dtype": "float32"}, {"name": "362", "dtype": "float32"}, {"name": "363", "dtype": "float32"}, {"name": "364", "dtype": "float32"}, {"name": "365", "dtype": "float32"}, {"name": "366", "dtype": "float32"}, {"name": "367", "dtype": "float32"}, {"name": "368", "dtype": "float32"}, {"name": "369", "dtype": "float32"}, {"name": "370", "dtype": "float32"}, {"name": "371", "dtype": "float32"}, {"name": "372", "dtype": "float32"}, {"name": "373", "dtype": "float32"}, {"name": "374", "dtype": "float32"}, {"name": "375", "dtype": "float32"}, {"name": "376", "dtype": "float32"}, {"name": "377", "dtype": "float32"}, {"name": "378", "dtype": "float32"}, {"name": "379", "dtype": "float32"}, {"name": "380", "dtype": "float32"}, {"name": "381", "dtype": "float32"}, {"name": 
"382", "dtype": "float32"}, {"name": "383", "dtype": "float32"}, {"name": "384", "dtype": "float32"}, {"name": "385", "dtype": "float32"}, {"name": "386", "dtype": "float32"}, {"name": "387", "dtype": "float32"}, {"name": "388", "dtype": "float32"}, {"name": "389", "dtype": "float32"}, {"name": "390", "dtype": "float32"}, {"name": "391", "dtype": "float32"}, {"name": "392", "dtype": "float32"}, {"name": "393", "dtype": "float32"}, {"name": "394", "dtype": "float32"}, {"name": "395", "dtype": "float32"}, {"name": "396", "dtype": "float32"}, {"name": "397", "dtype": "float32"}, {"name": "398", "dtype": "float32"}, {"name": "399", "dtype": "float32"}, {"name": "400", "dtype": "float32"}, {"name": "401", "dtype": "float32"}, {"name": "402", "dtype": "float32"}, {"name": "403", "dtype": "float32"}, {"name": "404", "dtype": "float32"}, {"name": "405", "dtype": "float32"}, {"name": "406", "dtype": "float32"}, {"name": "407", "dtype": "float32"}, {"name": "408", "dtype": "float32"}, {"name": "409", "dtype": "float32"}, {"name": "410", "dtype": "float32"}, {"name": "411", "dtype": "float32"}, {"name": "412", "dtype": "float32"}, {"name": "413", "dtype": "float32"}, {"name": "414", "dtype": "float32"}, {"name": "415", "dtype": "float32"}, {"name": "416", "dtype": "float32"}, {"name": "417", "dtype": "float32"}, {"name": "418", "dtype": "float32"}, {"name": "419", "dtype": "float32"}, {"name": "420", "dtype": "float32"}, {"name": "421", "dtype": "float32"}, {"name": "422", "dtype": "float32"}, {"name": "423", "dtype": "float32"}, {"name": "424", "dtype": "float32"}, {"name": "425", "dtype": "float32"}, {"name": "426", "dtype": "float32"}, {"name": "427", "dtype": "float32"}, {"name": "428", "dtype": "float32"}, {"name": "429", "dtype": "float32"}, {"name": "430", "dtype": "float32"}, {"name": "431", "dtype": "float32"}, {"name": "432", "dtype": "float32"}, {"name": "433", "dtype": "float32"}, {"name": "434", "dtype": "float32"}, {"name": "435", "dtype": "float32"}, {"name": "436", "dtype": "float32"}, {"name": "437", "dtype": "float32"}, {"name": "438", "dtype": "float32"}, {"name": "439", "dtype": "float32"}, {"name": "440", "dtype": "float32"}, {"name": "441", "dtype": "float32"}, {"name": "442", "dtype": "float32"}, {"name": "443", "dtype": "float32"}, {"name": "444", "dtype": "float32"}, {"name": "445", "dtype": "float32"}, {"name": "446", "dtype": "float32"}, {"name": "447", "dtype": "float32"}, {"name": "448", "dtype": "float32"}, {"name": "449", "dtype": "float32"}, {"name": "450", "dtype": "float32"}, {"name": "451", "dtype": "float32"}, {"name": "452", "dtype": "float32"}, {"name": "453", "dtype": "float32"}, {"name": "454", "dtype": "float32"}, {"name": "455", "dtype": "float32"}, {"name": "456", "dtype": "float32"}, {"name": "457", "dtype": "float32"}, {"name": "458", "dtype": "float32"}, {"name": "459", "dtype": "float32"}, {"name": "460", "dtype": "float32"}, {"name": "461", "dtype": "float32"}, {"name": "462", "dtype": "float32"}, {"name": "463", "dtype": "float32"}, {"name": "464", "dtype": "float32"}, {"name": "465", "dtype": "float32"}, {"name": "466", "dtype": "float32"}, {"name": "467", "dtype": "float32"}, {"name": "468", "dtype": "float32"}, {"name": "469", "dtype": "float32"}, {"name": "470", "dtype": "float32"}, {"name": "471", "dtype": "float32"}, {"name": "472", "dtype": "float32"}, {"name": "473", "dtype": "float32"}, {"name": "474", "dtype": "float32"}, {"name": "475", "dtype": "float32"}, {"name": "476", "dtype": "float32"}, {"name": "477", "dtype": "float32"}, {"name": 
"478", "dtype": "float32"}, {"name": "479", "dtype": "float32"}, {"name": "480", "dtype": "float32"}, {"name": "481", "dtype": "float32"}, {"name": "482", "dtype": "float32"}, {"name": "483", "dtype": "float32"}, {"name": "484", "dtype": "float32"}, {"name": "485", "dtype": "float32"}, {"name": "486", "dtype": "float32"}, {"name": "487", "dtype": "float32"}, {"name": "488", "dtype": "float32"}, {"name": "489", "dtype": "float32"}, {"name": "490", "dtype": "float32"}, {"name": "491", "dtype": "float32"}, {"name": "492", "dtype": "float32"}, {"name": "493", "dtype": "float32"}, {"name": "494", "dtype": "float32"}, {"name": "495", "dtype": "float32"}, {"name": "496", "dtype": "float32"}, {"name": "497", "dtype": "float32"}, {"name": "498", "dtype": "float32"}, {"name": "499", "dtype": "float32"}, {"name": "500", "dtype": "float32"}, {"name": "501", "dtype": "float32"}, {"name": "502", "dtype": "float32"}, {"name": "503", "dtype": "float32"}, {"name": "504", "dtype": "float32"}, {"name": "505", "dtype": "float32"}, {"name": "506", "dtype": "float32"}, {"name": "507", "dtype": "float32"}, {"name": "508", "dtype": "float32"}, {"name": "509", "dtype": "float32"}, {"name": "510", "dtype": "float32"}, {"name": "511", "dtype": "float32"}, {"name": "512", "dtype": "float32"}, {"name": "513", "dtype": "float32"}, {"name": "514", "dtype": "float32"}, {"name": "515", "dtype": "float32"}, {"name": "516", "dtype": "float32"}, {"name": "517", "dtype": "float32"}, {"name": "518", "dtype": "float32"}, {"name": "519", "dtype": "float32"}, {"name": "520", "dtype": "float32"}, {"name": "521", "dtype": "float32"}, {"name": "522", "dtype": "float32"}, {"name": "523", "dtype": "float32"}, {"name": "524", "dtype": "float32"}, {"name": "525", "dtype": "float32"}, {"name": "526", "dtype": "float32"}, {"name": "527", "dtype": "float32"}, {"name": "528", "dtype": "float32"}, {"name": "529", "dtype": "float32"}, {"name": "530", "dtype": "float32"}, {"name": "531", "dtype": "float32"}, {"name": "532", "dtype": "float32"}, {"name": "533", "dtype": "float32"}, {"name": "534", "dtype": "float32"}, {"name": "535", "dtype": "float32"}, {"name": "536", "dtype": "float32"}, {"name": "537", "dtype": "float32"}, {"name": "538", "dtype": "float32"}, {"name": "539", "dtype": "float32"}, {"name": "540", "dtype": "float32"}, {"name": "541", "dtype": "float32"}, {"name": "542", "dtype": "float32"}, {"name": "543", "dtype": "float32"}, {"name": "544", "dtype": "float32"}, {"name": "545", "dtype": "float32"}, {"name": "546", "dtype": "float32"}, {"name": "547", "dtype": "float32"}, {"name": "548", "dtype": "float32"}, {"name": "549", "dtype": "float32"}, {"name": "550", "dtype": "float32"}, {"name": "551", "dtype": "float32"}, {"name": "552", "dtype": "float32"}, {"name": "553", "dtype": "float32"}, {"name": "554", "dtype": "float32"}, {"name": "555", "dtype": "float32"}, {"name": "556", "dtype": "float32"}, {"name": "557", "dtype": "float32"}, {"name": "558", "dtype": "float32"}, {"name": "559", "dtype": "float32"}, {"name": "560", "dtype": "float32"}, {"name": "561", "dtype": "float32"}, {"name": "562", "dtype": "float32"}, {"name": "563", "dtype": "float32"}, {"name": "564", "dtype": "float32"}, {"name": "565", "dtype": "float32"}, {"name": "566", "dtype": "float32"}, {"name": "567", "dtype": "float32"}, {"name": "568", "dtype": "float32"}, {"name": "569", "dtype": "float32"}, {"name": "570", "dtype": "float32"}, {"name": "571", "dtype": "float32"}, {"name": "572", "dtype": "float32"}, {"name": "573", "dtype": "float32"}, {"name": 
"574", "dtype": "float32"}, {"name": "575", "dtype": "float32"}, {"name": "576", "dtype": "float32"}, {"name": "577", "dtype": "float32"}, {"name": "578", "dtype": "float32"}, {"name": "579", "dtype": "float32"}, {"name": "580", "dtype": "float32"}, {"name": "581", "dtype": "float32"}, {"name": "582", "dtype": "float32"}, {"name": "583", "dtype": "float32"}, {"name": "584", "dtype": "float32"}, {"name": "585", "dtype": "float32"}, {"name": "586", "dtype": "float32"}, {"name": "587", "dtype": "float32"}, {"name": "588", "dtype": "float32"}, {"name": "589", "dtype": "float32"}, {"name": "590", "dtype": "float32"}, {"name": "591", "dtype": "float32"}, {"name": "592", "dtype": "float32"}, {"name": "593", "dtype": "float32"}, {"name": "594", "dtype": "float32"}, {"name": "595", "dtype": "float32"}, {"name": "596", "dtype": "float32"}, {"name": "597", "dtype": "float32"}, {"name": "598", "dtype": "float32"}, {"name": "599", "dtype": "float32"}, {"name": "600", "dtype": "float32"}, {"name": "601", "dtype": "float32"}, {"name": "602", "dtype": "float32"}, {"name": "603", "dtype": "float32"}, {"name": "604", "dtype": "float32"}, {"name": "605", "dtype": "float32"}, {"name": "606", "dtype": "float32"}, {"name": "607", "dtype": "float32"}, {"name": "608", "dtype": "float32"}, {"name": "609", "dtype": "float32"}, {"name": "610", "dtype": "float32"}, {"name": "611", "dtype": "float32"}, {"name": "612", "dtype": "float32"}, {"name": "613", "dtype": "float32"}, {"name": "614", "dtype": "float32"}, {"name": "615", "dtype": "float32"}, {"name": "616", "dtype": "float32"}, {"name": "617", "dtype": "float32"}, {"name": "618", "dtype": "float32"}, {"name": "619", "dtype": "float32"}, {"name": "620", "dtype": "float32"}, {"name": "621", "dtype": "float32"}, {"name": "622", "dtype": "float32"}, {"name": "623", "dtype": "float32"}, {"name": "624", "dtype": "float32"}, {"name": "625", "dtype": "float32"}, {"name": "626", "dtype": "float32"}, {"name": "627", "dtype": "float32"}, {"name": "628", "dtype": "float32"}, {"name": "629", "dtype": "float32"}, {"name": "630", "dtype": "float32"}, {"name": "631", "dtype": "float32"}, {"name": "632", "dtype": "float32"}, {"name": "633", "dtype": "float32"}, {"name": "634", "dtype": "float32"}, {"name": "635", "dtype": "float32"}, {"name": "636", "dtype": "float32"}, {"name": "637", "dtype": "float32"}, {"name": "638", "dtype": "float32"}, {"name": "639", "dtype": "float32"}, {"name": "640", "dtype": "float32"}, {"name": "641", "dtype": "float32"}, {"name": "642", "dtype": "float32"}, {"name": "643", "dtype": "float32"}, {"name": "644", "dtype": "float32"}, {"name": "645", "dtype": "float32"}, {"name": "646", "dtype": "float32"}, {"name": "647", "dtype": "float32"}, {"name": "648", "dtype": "float32"}, {"name": "649", "dtype": "float32"}, {"name": "650", "dtype": "float32"}, {"name": "651", "dtype": "float32"}, {"name": "652", "dtype": "float32"}, {"name": "653", "dtype": "float32"}, {"name": "654", "dtype": "float32"}, {"name": "655", "dtype": "float32"}, {"name": "656", "dtype": "float32"}, {"name": "657", "dtype": "float32"}, {"name": "658", "dtype": "float32"}, {"name": "659", "dtype": "float32"}, {"name": "660", "dtype": "float32"}, {"name": "661", "dtype": "float32"}, {"name": "662", "dtype": "float32"}, {"name": "663", "dtype": "float32"}, {"name": "664", "dtype": "float32"}, {"name": "665", "dtype": "float32"}, {"name": "666", "dtype": "float32"}, {"name": "667", "dtype": "float32"}, {"name": "668", "dtype": "float32"}, {"name": "669", "dtype": "float32"}, {"name": 
"670", "dtype": "float32"}, {"name": "671", "dtype": "float32"}, {"name": "672", "dtype": "float32"}, {"name": "673", "dtype": "float32"}, {"name": "674", "dtype": "float32"}, {"name": "675", "dtype": "float32"}, {"name": "676", "dtype": "float32"}, {"name": "677", "dtype": "float32"}, {"name": "678", "dtype": "float32"}, {"name": "679", "dtype": "float32"}, {"name": "680", "dtype": "float32"}, {"name": "681", "dtype": "float32"}, {"name": "682", "dtype": "float32"}, {"name": "683", "dtype": "float32"}, {"name": "684", "dtype": "float32"}, {"name": "685", "dtype": "float32"}, {"name": "686", "dtype": "float32"}, {"name": "687", "dtype": "float32"}, {"name": "688", "dtype": "float32"}, {"name": "689", "dtype": "float32"}, {"name": "690", "dtype": "float32"}, {"name": "691", "dtype": "float32"}, {"name": "692", "dtype": "float32"}, {"name": "693", "dtype": "float32"}, {"name": "694", "dtype": "float32"}, {"name": "695", "dtype": "float32"}, {"name": "696", "dtype": "float32"}, {"name": "697", "dtype": "float32"}, {"name": "698", "dtype": "float32"}, {"name": "699", "dtype": "float32"}, {"name": "700", "dtype": "float32"}, {"name": "701", "dtype": "float32"}, {"name": "702", "dtype": "float32"}, {"name": "703", "dtype": "float32"}, {"name": "704", "dtype": "float32"}, {"name": "705", "dtype": "float32"}, {"name": "706", "dtype": "float32"}, {"name": "707", "dtype": "float32"}, {"name": "708", "dtype": "float32"}, {"name": "709", "dtype": "float32"}, {"name": "710", "dtype": "float32"}, {"name": "711", "dtype": "float32"}, {"name": "712", "dtype": "float32"}, {"name": "713", "dtype": "float32"}, {"name": "714", "dtype": "float32"}, {"name": "715", "dtype": "float32"}, {"name": "716", "dtype": "float32"}, {"name": "717", "dtype": "float32"}, {"name": "718", "dtype": "float32"}, {"name": "719", "dtype": "float32"}, {"name": "720", "dtype": "float32"}, {"name": "721", "dtype": "float32"}, {"name": "722", "dtype": "float32"}, {"name": "723", "dtype": "float32"}, {"name": "724", "dtype": "float32"}, {"name": "725", "dtype": "float32"}, {"name": "726", "dtype": "float32"}, {"name": "727", "dtype": "float32"}, {"name": "728", "dtype": "float32"}, {"name": "729", "dtype": "float32"}, {"name": "730", "dtype": "float32"}, {"name": "731", "dtype": "float32"}, {"name": "732", "dtype": "float32"}, {"name": "733", "dtype": "float32"}, {"name": "734", "dtype": "float32"}, {"name": "735", "dtype": "float32"}, {"name": "736", "dtype": "float32"}, {"name": "737", "dtype": "float32"}, {"name": "738", "dtype": "float32"}, {"name": "739", "dtype": "float32"}, {"name": "740", "dtype": "float32"}, {"name": "741", "dtype": "float32"}, {"name": "742", "dtype": "float32"}, {"name": "743", "dtype": "float32"}, {"name": "744", "dtype": "float32"}, {"name": "745", "dtype": "float32"}, {"name": "746", "dtype": "float32"}, {"name": "747", "dtype": "float32"}, {"name": "748", "dtype": "float32"}, {"name": "749", "dtype": "float32"}, {"name": "750", "dtype": "float32"}, {"name": "751", "dtype": "float32"}, {"name": "752", "dtype": "float32"}, {"name": "753", "dtype": "float32"}, {"name": "754", "dtype": "float32"}, {"name": "755", "dtype": "float32"}, {"name": "756", "dtype": "float32"}, {"name": "757", "dtype": "float32"}, {"name": "758", "dtype": "float32"}, {"name": "759", "dtype": "float32"}, {"name": "760", "dtype": "float32"}, {"name": "761", "dtype": "float32"}, {"name": "762", "dtype": "float32"}, {"name": "763", "dtype": "float32"}, {"name": "764", "dtype": "float32"}, {"name": "765", "dtype": "float32"}, {"name": 
"766", "dtype": "float32"}, {"name": "767", "dtype": "float32"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 115582709.0625, "num_examples": 37500}, {"name": "test", "num_bytes": 38527570.0, "num_examples": 12500}], "download_size": 211883223, "dataset_size": 154110279.0625}}
2023-08-18T14:13:22+00:00
[]
[]
TAGS #region-us
# Dataset Card for "BGL_RoBERTa_Baseline" More Information needed
[ "# Dataset Card for \"BGL_RoBERTa_Baseline\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"BGL_RoBERTa_Baseline\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"BGL_RoBERTa_Baseline\"\n\nMore Information needed" ]
0d0c10b59a9ec72e7f925755a777847c69ac5ec9
# Dataset Card for medical

Chinese medical dataset (中文医疗数据集).

- LLM Supervised Finetuning repository: https://github.com/shibing624/textgen
- MedicalGPT repository: https://github.com/shibing624/MedicalGPT

## Dataset Description

medical is a Chinese medical dataset that can be used to train large language models for the medical domain.

```
tree medical
|-- finetune                    # supervised fine-tuning data, usable for SFT and RLHF
|   |-- test_en_1.json
|   |-- test_zh_0.json
|   |-- train_en_1.json
|   |-- train_zh_0.json
|   |-- valid_en_1.json
|   `-- valid_zh_0.json
|-- medical.py                  # used for the hf dataset preview
|-- pretrain                    # continued pretraining data
|   |-- medical_book_zh.json
|   |-- test_encyclopedia.json
|   |-- train_encyclopedia.json
|   `-- valid_encyclopedia.json
|-- README.md
`-- reward                      # reward model data
    |-- test.json
    |-- train.json
    `-- valid.json
```

### Original Dataset Summary

#### pretrain
- train_encyclopedia.json: about 360k records, from the medical encyclopedia QA data [FreedomIntelligence/huatuo_encyclopedia_qa](https://huggingface.co/datasets/FreedomIntelligence/huatuo_encyclopedia_qa); questions and answers are concatenated into a fluent `text` field, used during pretraining to inject medical knowledge.
- medical_book_zh.json: 8,475 records of text from medical textbooks, source: https://github.com/jind11/MedQA, original dataset: [google drive](https://drive.google.com/u/0/uc?export=download&confirm=t&id=1ImYUSLk9JbgHXOemfvyiDiirluZHPeQw); the only processing was splitting long passages into smaller passages of 2,048 characters.

#### finetune
- train_zh_0.json: about 1.95M records, from 1) the Chinese medical dialogue dataset [Toyhom/Chinese-medical-dialogue-data](https://github.com/Toyhom/Chinese-medical-dialogue-data), consultation data from six medical departments, about 790k records; 2) the online medical encyclopedia huatuo_encyclopedia_qa, about 360k records; 3) the medical knowledge graph huatuo_knowledge_graph_qa, about 790k records. Merging the three parts gives about 1.95M records in total.
- train_en_1.json: about 110k records, from the English medical consultation dialogue data [Kent0n-Li/ChatDoctor](https://github.com/Kent0n-Li/ChatDoctor), merging the HealthCareMagic-100k and GenMedGPT-5k datasets.

#### reward
- train.json: 4,000 records; the questions are 4,000 randomly sampled questions from the Chinese medical dialogue dataset [Toyhom/Chinese-medical-dialogue-data](https://github.com/Toyhom/Chinese-medical-dialogue-data), `response_chosen` is the doctor's reply from that dataset, and `response_rejected` is the reply of the BenTsao (本草) model [SCIR-HI/Huatuo-Llama-Med-Chinese](https://github.com/SCIR-HI/Huatuo-Llama-Med-Chinese).

### Supported Tasks and Leaderboards

Chinese medical dialogue models. The dataset is designed for training pretrained language models on medical tasks.

### Languages

The data are mainly in Chinese; the finetune split also contains English dialogues.

## Dataset Structure

### Data Instances

An example of "train" looks as follows:

head pretrain/train_encyclopedia.json

```json
{"text": "怀孕后嘴巴很淡怎么办?有孕妇在怀孕之后,发现自己嘴巴比较淡,出现这种情况的原因其实也非常的复杂,首先和妊娠反应有直接的关系,这是一种正常的情况,另外有些孕妇平常不注意口腔卫生,舌苔比较厚或者自己有了一些消化系统方面的疾病,这就要求人们必须要及时的进行处理。女性在怀孕之后,身体就会出现一些明显的变化,首先人们月经会停止,另外也会有恶心、呕吐等一些妊娠反应,不过这些都是正常的。有些孕妇发现自己在怀孕之后,口味发生了很大的变化,嘴巴变得非常的淡。其实这也和激素变化有直接的关系,可能是妊娠反应所致,在怀孕期间,因为受到体内激素水平的变化,所以就会有肠胃系统的改变,人们可能会出现食欲不振,消化不良等症状表现,也有一些孕妇会发现自己嘴巴没有味道,会有口苦的症状,而这也是正常的孕期反应,人们在平常要多喝一些水,多吃一些清淡营养的食物慢慢就会改善。也有可能是舌苔过厚所致,孕妇嘴巴里面没有味道,很有可能是舌苔比较重、舌苔过厚导致的,这样就会影响到味蕾对味道的敏感度,不仅嘴巴里面没有味道,甚至有些孕妇在说话的时候也会有口臭,这就要求人们在每天早上漱口的时候,必须要用牙刷刷舌苔开始,不要清理的特别深,以免人们会有呕吐,慢慢习惯之后再往深一点的清洗,一般2到3天就会得到改善。嘴巴感到比较淡,其实也和脾胃虚寒有直接的关系,消化系统疾病,内分泌疾病,营养不良等,但有可能导致舌头部位因为味蕾的敏感度下降,产生口淡之感,患者会有食欲不振的表现,发现病症及时就诊治疗。"}
```

head finetune/train_zh_0.json

```json
{"instruction": "曲匹地尔片的用法用量", "input": "", "output": "注意:同种药品可由于不同的包装规格有不同的用法或用量。本文只供参考。如果不确定,请参看药品随带的说明书或向医生询问。口服。一次50~100mg(1-2片),3次/日,或遵医嘱。"}
```

head finetune/train_en_1.json

```json
{"instruction": "If you are a doctor, please answer the medical questions based on the patient's description.", "input": "Doctor, I have been experiencing sudden and frequent panic attacks. I don't know what to do.", "output": "Well, based on what you're telling me, it sounds like you may be suffering from panic disorder. The best course of action is to start with psychotherapy and mental health counseling. Additionally, we should conduct an electrocardiogram to make sure that there are no physical issues causing your panic attacks. We will also need to perform a depression screen and a toxicology screen to rule out any other underlying causes. Finally, I would recommend a comprehensive psychological and psychiatric evaluation and therapy to help manage your symptoms."}
```

head reward/train.json

```json
{"question": "孩子感统失调,学习不行怎么办?,孩子说话晚,走路晚,现在小学二年级,学习跟不上,理解力差,运动协调性差,家里很着急,不知怎么办。", "response_chosen": "病情分析:你好!孩子说话晚,走路也晚,很可能是大脑本身发育不好引起的发育迟缓。而五岁时所致的智力检查为临界范围,那就是说孩子的智商是有问题的,也应考虑与大脑发育不好有关。指导意见:人的大脑在头一年发育最快,可塑性最强,在头三年可塑性还是可以的,超过三岁再进行训练,效果就不怎么好了。建议再给孩子做一做智力测试,如果孩子的智商还是在临界范围,那就要考虑让孩子去特殊学校进行康复训练,而不是继续在普通小学就读,否则对孩子来说,就是强人所难了。希望自己的孩子能聪明,这是每个家长都会有的心愿,但如果孩子自身的条件就是不能跟上同龄孩子,那家长也要面对这个事实的,对吗?医生询问:", "response_rejected": "建议家长先带孩子去正规医院做全面检查以确定病因和病情严重程度;同时可以进行物理治疗、康复训练等辅助治疗方法。"}
```

### Data Fields

#### Pretraining dataset (pretrain)
Field descriptions:
- text: the text

#### Instruction fine-tuning dataset (finetune)
Field descriptions:
- instruction: the instruction
- input: the question (may be empty)
- output: the answer

#### Reward model dataset (reward)
Field descriptions:
- question: the question
- response_chosen: the preferred, higher-quality answer
- response_rejected: the rejected, lower-quality answer

### Data Splits

```
> wc -l medical/*/*
     500 medical/finetune/test_en_1.json
     500 medical/finetune/test_zh_0.json
  116617 medical/finetune/train_en_1.json
 1949972 medical/finetune/train_zh_0.json
     500 medical/finetune/valid_en_1.json
     500 medical/finetune/valid_zh_0.json
    8475 medical/pretrain/medical_book_zh.json
     500 medical/pretrain/test_encyclopedia.json
  361420 medical/pretrain/train_encyclopedia.json
     500 medical/pretrain/valid_encyclopedia.json
     100 medical/reward/test.json
    3800 medical/reward/train.json
     100 medical/reward/valid.json
 2443484 total
```

### Licensing Information

The dataset is available under the Apache 2.0 license.

### Citation Information

- https://github.com/Toyhom/Chinese-medical-dialogue-data
- https://github.com/FreedomIntelligence/Huatuo-26M/blob/main/README_zh-CN.md
- https://huggingface.co/datasets/FreedomIntelligence/huatuo_encyclopedia_qa
- https://huggingface.co/datasets/FreedomIntelligence/huatuo_knowledge_graph_qa
- https://github.com/Kent0n-Li/ChatDoctor

A few additional high-quality reward model datasets:

- https://huggingface.co/datasets/Dahoas/synthetic-instruct-gptj-pairwise
- https://huggingface.co/datasets/sunzeyeah/chinese_chatgpt_corpus
- https://huggingface.co/datasets/Cohere/miracl-zh-queries-22-12
- https://huggingface.co/datasets/Dahoas/rm-static

### Contributions

Compiled and uploaded by [shibing624](https://github.com/shibing624).
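Since each file holds one JSON record per line (as the `head` examples above suggest), the subsets can also be consumed locally with the generic `json` loader. A minimal sketch follows; the paths assume the `tree medical` layout shown earlier, and the JSON-lines format is an inference from the examples, not stated explicitly in the card.

```python
from datasets import load_dataset

# A sketch assuming the repository layout shown above and that each *.json
# file is JSON lines (one record per line), as the head examples suggest.
finetune = load_dataset("json", data_files={
    "train": "medical/finetune/train_zh_0.json",
    "validation": "medical/finetune/valid_zh_0.json",
    "test": "medical/finetune/test_zh_0.json",
})
print(finetune["train"][0]["instruction"])  # fields: instruction, input, output

reward = load_dataset("json", data_files="medical/reward/train.json", split="train")
pair = reward[0]  # fields: question, response_chosen, response_rejected
print(pair["question"], pair["response_chosen"][:50])
```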
ticoAg/shibing624-medical-pretrain
[ "task_categories:text-generation", "size_categories:1M<n<10M", "language:zh", "language:en", "license:apache-2.0", "text-generation", "region:us" ]
2023-08-18T13:34:28+00:00
{"language": ["zh", "en"], "license": "apache-2.0", "size_categories": ["1M<n<10M"], "task_categories": ["text-generation"], "pretty_name": "medical", "tags": ["text-generation"]}
2023-08-18T13:37:28+00:00
[]
[ "zh", "en" ]
TAGS #task_categories-text-generation #size_categories-1M<n<10M #language-Chinese #language-English #license-apache-2.0 #text-generation #region-us
# Dataset Card for medical 中文医疗数据集 - LLM Supervised Finetuning repository: URL - MeidcalGPT repository: URL ## Dataset Description medical is a Chinese Medical dataset. 医疗数据集,可用于医疗领域大模型训练。 ### Original Dataset Summary #### pretrain - train_encyclopedia.json: 共36万条,来自医疗百科数据FreedomIntelligence/huatuo_encyclopedia_qa , 拼接 questions 和 answers,形成 text 文本字段,语句通顺,用于预训练注入医疗知识。 - medical_book_zh.json: 共8475条,来自医疗教材的文本数据,来源:URL, 原始数据集:google drive ,只对长段落切分为2048字的小段落了。 #### finetune - train_zh_0.json: 共195万条,来自1)中文医疗对话数据集Toyhom/Chinese-medical-dialogue-data的六个科室医疗问诊数据, 有79万条;2)在线医疗百科 huatuo_encyclopedia_qa ,有36万条;3)医疗知识图谱 huatuo_knowledge_graph_qa,有79万条。三部分合并,共195万条。 - train_en_1.json:共11万条,来自英文医疗问诊对话数据Kent0n-Li/ChatDoctor,合并了HealthCareMagic-100k、GenMedGPT-5k 数据集,共11万条。 #### reward - URL 共4000条,问题来自中文医疗对话数据集Toyhom/Chinese-medical-dialogue-data的随机4000条提问,'response_chosen'来自该数据集的医生答复, 'response_rejected'来自本草模型SCIR-HI/Huatuo-Llama-Med-Chinese的答复。 ### Supported Tasks and Leaderboards 中文医疗对话模型 The dataset designed for medical task training pretrained language models. ### Languages The data are in Chinese. ## Dataset Structure ### Data Instances An example of "train" looks as follows: head pretrain/train_encyclopedia.json head finetune/train_zh_0.json head finetune/train_en_1.json head reward/URL ### Data Fields #### 预训练数据集 pretrain 字段解释: - text: 文本 #### 指令微调数据集 finetune 字段解释: - instruction: 指令 - input:问题(可为空) - output:答复 #### 奖励模型数据集 reward 字段解释: - question: 问题 - response_chosen: 优质回答 - response_rejected: 低质回答 ### Data Splits ### Licensing Information The dataset is available under the Apache 2.0. - URL - URL - URL - URL - URL 附上几个优质的reward model dataset: - URL - URL - URL - URL ### Contributions shibing624 整理并上传
[ "# Dataset Card for medical\n中文医疗数据集\n\n- LLM Supervised Finetuning repository: URL\n- MeidcalGPT repository: URL", "## Dataset Description\n\nmedical is a Chinese Medical dataset. 医疗数据集,可用于医疗领域大模型训练。", "### Original Dataset Summary", "#### pretrain\n- train_encyclopedia.json: 共36万条,来自医疗百科数据FreedomIntelligence/huatuo_encyclopedia_qa , 拼接 questions 和 answers,形成 text 文本字段,语句通顺,用于预训练注入医疗知识。\n- medical_book_zh.json: 共8475条,来自医疗教材的文本数据,来源:URL, 原始数据集:google drive ,只对长段落切分为2048字的小段落了。", "#### finetune\n- train_zh_0.json: 共195万条,来自1)中文医疗对话数据集Toyhom/Chinese-medical-dialogue-data的六个科室医疗问诊数据,\n有79万条;2)在线医疗百科 huatuo_encyclopedia_qa ,有36万条;3)医疗知识图谱 huatuo_knowledge_graph_qa,有79万条。三部分合并,共195万条。\n- train_en_1.json:共11万条,来自英文医疗问诊对话数据Kent0n-Li/ChatDoctor,合并了HealthCareMagic-100k、GenMedGPT-5k 数据集,共11万条。", "#### reward\n- URL 共4000条,问题来自中文医疗对话数据集Toyhom/Chinese-medical-dialogue-data的随机4000条提问,'response_chosen'来自该数据集的医生答复,\n'response_rejected'来自本草模型SCIR-HI/Huatuo-Llama-Med-Chinese的答复。", "### Supported Tasks and Leaderboards\n中文医疗对话模型\n\nThe dataset designed for medical task training pretrained language models.", "### Languages\n\nThe data are in Chinese.", "## Dataset Structure", "### Data Instances\n\nAn example of \"train\" looks as follows:\n\nhead pretrain/train_encyclopedia.json\n\n\nhead finetune/train_zh_0.json\n\n\nhead finetune/train_en_1.json\n\n\nhead reward/URL", "### Data Fields", "#### 预训练数据集 pretrain\n字段解释:\n- text: 文本", "#### 指令微调数据集 finetune\n字段解释:\n- instruction: 指令\n- input:问题(可为空)\n- output:答复", "#### 奖励模型数据集 reward\n字段解释:\n- question: 问题\n- response_chosen: 优质回答\n- response_rejected: 低质回答", "### Data Splits", "### Licensing Information\n\nThe dataset is available under the Apache 2.0.\n\n\n\n\n- URL\n- URL\n- URL\n- URL\n- URL\n\n附上几个优质的reward model dataset: \n- URL\n- URL\n- URL\n- URL", "### Contributions\n\nshibing624 整理并上传" ]
[ "TAGS\n#task_categories-text-generation #size_categories-1M<n<10M #language-Chinese #language-English #license-apache-2.0 #text-generation #region-us \n", "# Dataset Card for medical\n中文医疗数据集\n\n- LLM Supervised Finetuning repository: URL\n- MeidcalGPT repository: URL", "## Dataset Description\n\nmedical is a Chinese Medical dataset. 医疗数据集,可用于医疗领域大模型训练。", "### Original Dataset Summary", "#### pretrain\n- train_encyclopedia.json: 共36万条,来自医疗百科数据FreedomIntelligence/huatuo_encyclopedia_qa , 拼接 questions 和 answers,形成 text 文本字段,语句通顺,用于预训练注入医疗知识。\n- medical_book_zh.json: 共8475条,来自医疗教材的文本数据,来源:URL, 原始数据集:google drive ,只对长段落切分为2048字的小段落了。", "#### finetune\n- train_zh_0.json: 共195万条,来自1)中文医疗对话数据集Toyhom/Chinese-medical-dialogue-data的六个科室医疗问诊数据,\n有79万条;2)在线医疗百科 huatuo_encyclopedia_qa ,有36万条;3)医疗知识图谱 huatuo_knowledge_graph_qa,有79万条。三部分合并,共195万条。\n- train_en_1.json:共11万条,来自英文医疗问诊对话数据Kent0n-Li/ChatDoctor,合并了HealthCareMagic-100k、GenMedGPT-5k 数据集,共11万条。", "#### reward\n- URL 共4000条,问题来自中文医疗对话数据集Toyhom/Chinese-medical-dialogue-data的随机4000条提问,'response_chosen'来自该数据集的医生答复,\n'response_rejected'来自本草模型SCIR-HI/Huatuo-Llama-Med-Chinese的答复。", "### Supported Tasks and Leaderboards\n中文医疗对话模型\n\nThe dataset designed for medical task training pretrained language models.", "### Languages\n\nThe data are in Chinese.", "## Dataset Structure", "### Data Instances\n\nAn example of \"train\" looks as follows:\n\nhead pretrain/train_encyclopedia.json\n\n\nhead finetune/train_zh_0.json\n\n\nhead finetune/train_en_1.json\n\n\nhead reward/URL", "### Data Fields", "#### 预训练数据集 pretrain\n字段解释:\n- text: 文本", "#### 指令微调数据集 finetune\n字段解释:\n- instruction: 指令\n- input:问题(可为空)\n- output:答复", "#### 奖励模型数据集 reward\n字段解释:\n- question: 问题\n- response_chosen: 优质回答\n- response_rejected: 低质回答", "### Data Splits", "### Licensing Information\n\nThe dataset is available under the Apache 2.0.\n\n\n\n\n- URL\n- URL\n- URL\n- URL\n- URL\n\n附上几个优质的reward model dataset: \n- URL\n- URL\n- URL\n- URL", "### Contributions\n\nshibing624 整理并上传" ]
[ 51, 36, 25, 7, 117, 163, 86, 29, 10, 6, 59, 5, 19, 35, 37, 5, 46, 13 ]
[ "passage: TAGS\n#task_categories-text-generation #size_categories-1M<n<10M #language-Chinese #language-English #license-apache-2.0 #text-generation #region-us \n# Dataset Card for medical\n中文医疗数据集\n\n- LLM Supervised Finetuning repository: URL\n- MeidcalGPT repository: URL## Dataset Description\n\nmedical is a Chinese Medical dataset. 医疗数据集,可用于医疗领域大模型训练。### Original Dataset Summary#### pretrain\n- train_encyclopedia.json: 共36万条,来自医疗百科数据FreedomIntelligence/huatuo_encyclopedia_qa , 拼接 questions 和 answers,形成 text 文本字段,语句通顺,用于预训练注入医疗知识。\n- medical_book_zh.json: 共8475条,来自医疗教材的文本数据,来源:URL, 原始数据集:google drive ,只对长段落切分为2048字的小段落了。#### finetune\n- train_zh_0.json: 共195万条,来自1)中文医疗对话数据集Toyhom/Chinese-medical-dialogue-data的六个科室医疗问诊数据,\n有79万条;2)在线医疗百科 huatuo_encyclopedia_qa ,有36万条;3)医疗知识图谱 huatuo_knowledge_graph_qa,有79万条。三部分合并,共195万条。\n- train_en_1.json:共11万条,来自英文医疗问诊对话数据Kent0n-Li/ChatDoctor,合并了HealthCareMagic-100k、GenMedGPT-5k 数据集,共11万条。#### reward\n- URL 共4000条,问题来自中文医疗对话数据集Toyhom/Chinese-medical-dialogue-data的随机4000条提问,'response_chosen'来自该数据集的医生答复,\n'response_rejected'来自本草模型SCIR-HI/Huatuo-Llama-Med-Chinese的答复。" ]
de6e067a0506046971e969ea8d61b29414f458de
# Dataset Card for Evaluation run of gpt2

<!-- Provide a quick summary of the dataset. -->

Dataset automatically created during the evaluation run of model [gpt2](https://huggingface.co/gpt2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 65 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 24 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_gpt2",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2024-01-22T13:56:20.291666](https://huggingface.co/datasets/open-llm-leaderboard/details_gpt2/blob/main/results_2024-01-22T13-56-20.291666.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{ "all": { "acc": 0.25780579051672486, "acc_stderr": 0.030658881019520554, "acc_norm": 0.2586547713391113, "acc_norm_stderr": 0.031431381356225356, "mc1": 0.22766217870257038, "mc1_stderr": 0.01467925503211107, "mc2": 0.4069116400376613, "mc2_stderr": 0.014934250122346554 }, "harness|arc:challenge|25": { "acc": 0.197098976109215, "acc_stderr": 0.011625047669880633, "acc_norm": 0.22013651877133106, "acc_norm_stderr": 0.01210812488346097 }, "harness|hellaswag|10": { "acc": 0.29267078271260705, "acc_stderr": 0.004540586983229993, "acc_norm": 0.3152758414658435, "acc_norm_stderr": 0.0046367607625228515 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.22962962962962963, "acc_stderr": 0.03633384414073462, "acc_norm": 0.22962962962962963, "acc_norm_stderr": 0.03633384414073462 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.16447368421052633, "acc_stderr": 0.0301675334686327, "acc_norm": 0.16447368421052633, "acc_norm_stderr": 0.0301675334686327 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.17, "acc_stderr": 0.0377525168068637, "acc_norm": 0.17, "acc_norm_stderr": 0.0377525168068637 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.24150943396226415, "acc_stderr": 0.026341480371118345, "acc_norm": 0.24150943396226415, "acc_norm_stderr": 0.026341480371118345 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2222222222222222, "acc_stderr": 0.03476590104304134, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036846, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr":
0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.24277456647398843, "acc_stderr": 0.0326926380614177, "acc_norm": 0.24277456647398843, "acc_norm_stderr": 0.0326926380614177 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.2549019607843137, "acc_stderr": 0.043364327079931785, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.043364327079931785 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.16, "acc_stderr": 0.03684529491774709, "acc_norm": 0.16, "acc_norm_stderr": 0.03684529491774709 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2723404255319149, "acc_stderr": 0.029101290698386698, "acc_norm": 0.2723404255319149, "acc_norm_stderr": 0.029101290698386698 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.041424397194893624, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.041424397194893624 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.25396825396825395, "acc_stderr": 0.022418042891113942, "acc_norm": 0.25396825396825395, "acc_norm_stderr": 0.022418042891113942 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.14285714285714285, "acc_stderr": 0.0312984318574381, "acc_norm": 0.14285714285714285, "acc_norm_stderr": 0.0312984318574381 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.15, "acc_stderr": 0.035887028128263686, "acc_norm": 0.15, "acc_norm_stderr": 0.035887028128263686 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.2967741935483871, "acc_stderr": 0.025988500792411894, "acc_norm": 0.2967741935483871, "acc_norm_stderr": 0.025988500792411894 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.270935960591133, "acc_stderr": 0.03127090713297698, "acc_norm": 0.270935960591133, "acc_norm_stderr": 0.03127090713297698 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.26, "acc_stderr": 0.04408440022768079, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768079 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.35353535353535354, "acc_stderr": 0.03406086723547153, "acc_norm": 0.35353535353535354, "acc_norm_stderr": 0.03406086723547153 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.36787564766839376, "acc_stderr": 0.03480175668466036, "acc_norm": 0.36787564766839376, "acc_norm_stderr": 0.03480175668466036 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.2717948717948718, "acc_stderr": 0.022556551010132358, "acc_norm": 0.2717948717948718, "acc_norm_stderr": 0.022556551010132358 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26296296296296295, "acc_stderr": 0.026842057873833706, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.026842057873833706 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.28991596638655465, "acc_stderr": 0.029472485833136098, "acc_norm": 0.28991596638655465, "acc_norm_stderr": 0.029472485833136098 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.271523178807947, "acc_stderr": 0.03631329803969654, "acc_norm": 0.271523178807947, "acc_norm_stderr": 0.03631329803969654 }, 
"harness|hendrycksTest-high_school_psychology|5": { "acc": 0.3486238532110092, "acc_stderr": 0.020431254090714328, "acc_norm": 0.3486238532110092, "acc_norm_stderr": 0.020431254090714328 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4722222222222222, "acc_stderr": 0.0340470532865388, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.0340470532865388 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.24472573839662448, "acc_stderr": 0.027985699387036416, "acc_norm": 0.24472573839662448, "acc_norm_stderr": 0.027985699387036416 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.2914798206278027, "acc_stderr": 0.030500283176545923, "acc_norm": 0.2914798206278027, "acc_norm_stderr": 0.030500283176545923 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.26717557251908397, "acc_stderr": 0.038808483010823944, "acc_norm": 0.26717557251908397, "acc_norm_stderr": 0.038808483010823944 }, "harness|hendrycksTest-international_law|5": { "acc": 0.32231404958677684, "acc_stderr": 0.04266416363352168, "acc_norm": 0.32231404958677684, "acc_norm_stderr": 0.04266416363352168 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.21296296296296297, "acc_stderr": 0.03957835471980981, "acc_norm": 0.21296296296296297, "acc_norm_stderr": 0.03957835471980981 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.26380368098159507, "acc_stderr": 0.03462419931615623, "acc_norm": 0.26380368098159507, "acc_norm_stderr": 0.03462419931615623 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.25892857142857145, "acc_stderr": 0.041577515398656284, "acc_norm": 0.25892857142857145, "acc_norm_stderr": 0.041577515398656284 }, "harness|hendrycksTest-management|5": { "acc": 0.34951456310679613, "acc_stderr": 0.04721188506097173, "acc_norm": 0.34951456310679613, "acc_norm_stderr": 0.04721188506097173 }, "harness|hendrycksTest-marketing|5": { "acc": 0.1794871794871795, "acc_stderr": 0.025140935950335418, "acc_norm": 0.1794871794871795, "acc_norm_stderr": 0.025140935950335418 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.21583652618135377, "acc_stderr": 0.014711684386139958, "acc_norm": 0.21583652618135377, "acc_norm_stderr": 0.014711684386139958 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24277456647398843, "acc_stderr": 0.0230836585869842, "acc_norm": 0.24277456647398843, "acc_norm_stderr": 0.0230836585869842 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217889, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217889 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.21895424836601307, "acc_stderr": 0.02367908986180772, "acc_norm": 0.21895424836601307, "acc_norm_stderr": 0.02367908986180772 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.24758842443729903, "acc_stderr": 0.024513879973621967, "acc_norm": 0.24758842443729903, "acc_norm_stderr": 0.024513879973621967 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.22530864197530864, "acc_stderr": 0.023246202647819746, "acc_norm": 0.22530864197530864, "acc_norm_stderr": 0.023246202647819746 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.26595744680851063, "acc_stderr": 0.026358065698880592, "acc_norm": 
0.26595744680851063, "acc_norm_stderr": 0.026358065698880592 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.44485294117647056, "acc_stderr": 0.030187532060329376, "acc_norm": 0.44485294117647056, "acc_norm_stderr": 0.030187532060329376 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.26143790849673204, "acc_stderr": 0.017776947157528034, "acc_norm": 0.26143790849673204, "acc_norm_stderr": 0.017776947157528034 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03955932861795833, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03955932861795833 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.4, "acc_stderr": 0.031362502409358936, "acc_norm": 0.4, "acc_norm_stderr": 0.031362502409358936 }, "harness|hendrycksTest-sociology|5": { "acc": 0.22885572139303484, "acc_stderr": 0.029705284056772426, "acc_norm": 0.22885572139303484, "acc_norm_stderr": 0.029705284056772426 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.27, "acc_stderr": 0.04461960433384739, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384739 }, "harness|hendrycksTest-virology|5": { "acc": 0.1927710843373494, "acc_stderr": 0.030709824050565274, "acc_norm": 0.1927710843373494, "acc_norm_stderr": 0.030709824050565274 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.21052631578947367, "acc_stderr": 0.0312678171466318, "acc_norm": 0.21052631578947367, "acc_norm_stderr": 0.0312678171466318 }, "harness|truthfulqa:mc|0": { "mc1": 0.22766217870257038, "mc1_stderr": 0.01467925503211107, "mc2": 0.4069116400376613, "mc2_stderr": 0.014934250122346554 }, "harness|winogrande|5": { "acc": 0.5043409629044988, "acc_stderr": 0.014051956064076887 }, "harness|gsm8k|5": { "acc": 0.006823351023502654, "acc_stderr": 0.0022675371022544736 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_gpt2
[ "region:us" ]
2023-08-18T13:35:26+00:00
{"pretty_name": "Evaluation run of gpt2", "dataset_summary": "Dataset automatically created during the evaluation run of model [gpt2](https://huggingface.co/gpt2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 65 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 24 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_gpt2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-22T13:56:20.291666](https://huggingface.co/datasets/open-llm-leaderboard/details_gpt2/blob/main/results_2024-01-22T13-56-20.291666.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.25780579051672486,\n \"acc_stderr\": 0.030658881019520554,\n \"acc_norm\": 0.2586547713391113,\n \"acc_norm_stderr\": 0.031431381356225356,\n \"mc1\": 0.22766217870257038,\n \"mc1_stderr\": 0.01467925503211107,\n \"mc2\": 0.4069116400376613,\n \"mc2_stderr\": 0.014934250122346554\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.197098976109215,\n \"acc_stderr\": 0.011625047669880633,\n \"acc_norm\": 0.22013651877133106,\n \"acc_norm_stderr\": 0.01210812488346097\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.29267078271260705,\n \"acc_stderr\": 0.004540586983229993,\n \"acc_norm\": 0.3152758414658435,\n \"acc_norm_stderr\": 0.0046367607625228515\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.22962962962962963,\n \"acc_stderr\": 0.03633384414073462,\n \"acc_norm\": 0.22962962962962963,\n \"acc_norm_stderr\": 0.03633384414073462\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.16447368421052633,\n \"acc_stderr\": 0.0301675334686327,\n \"acc_norm\": 0.16447368421052633,\n \"acc_norm_stderr\": 0.0301675334686327\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.17,\n \"acc_stderr\": 0.0377525168068637,\n \"acc_norm\": 0.17,\n \"acc_norm_stderr\": 0.0377525168068637\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.24150943396226415,\n \"acc_stderr\": 0.026341480371118345,\n \"acc_norm\": 0.24150943396226415,\n \"acc_norm_stderr\": 0.026341480371118345\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036846,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n 
\"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.24277456647398843,\n \"acc_stderr\": 0.0326926380614177,\n \"acc_norm\": 0.24277456647398843,\n \"acc_norm_stderr\": 0.0326926380614177\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.043364327079931785,\n \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.043364327079931785\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.16,\n \"acc_stderr\": 0.03684529491774709,\n \"acc_norm\": 0.16,\n \"acc_norm_stderr\": 0.03684529491774709\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.2723404255319149,\n \"acc_stderr\": 0.029101290698386698,\n \"acc_norm\": 0.2723404255319149,\n \"acc_norm_stderr\": 0.029101290698386698\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2631578947368421,\n \"acc_stderr\": 0.041424397194893624,\n \"acc_norm\": 0.2631578947368421,\n \"acc_norm_stderr\": 0.041424397194893624\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135302,\n \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135302\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.25396825396825395,\n \"acc_stderr\": 0.022418042891113942,\n \"acc_norm\": 0.25396825396825395,\n \"acc_norm_stderr\": 0.022418042891113942\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.14285714285714285,\n \"acc_stderr\": 0.0312984318574381,\n \"acc_norm\": 0.14285714285714285,\n \"acc_norm_stderr\": 0.0312984318574381\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.15,\n \"acc_stderr\": 0.035887028128263686,\n \"acc_norm\": 0.15,\n \"acc_norm_stderr\": 0.035887028128263686\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.2967741935483871,\n \"acc_stderr\": 0.025988500792411894,\n \"acc_norm\": 0.2967741935483871,\n \"acc_norm_stderr\": 0.025988500792411894\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.270935960591133,\n \"acc_stderr\": 0.03127090713297698,\n \"acc_norm\": 0.270935960591133,\n \"acc_norm_stderr\": 0.03127090713297698\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.35353535353535354,\n \"acc_stderr\": 0.03406086723547153,\n \"acc_norm\": 0.35353535353535354,\n \"acc_norm_stderr\": 0.03406086723547153\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.36787564766839376,\n \"acc_stderr\": 0.03480175668466036,\n \"acc_norm\": 0.36787564766839376,\n \"acc_norm_stderr\": 0.03480175668466036\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.2717948717948718,\n \"acc_stderr\": 0.022556551010132358,\n \"acc_norm\": 0.2717948717948718,\n \"acc_norm_stderr\": 
0.022556551010132358\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26296296296296295,\n \"acc_stderr\": 0.026842057873833706,\n \"acc_norm\": 0.26296296296296295,\n \"acc_norm_stderr\": 0.026842057873833706\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.28991596638655465,\n \"acc_stderr\": 0.029472485833136098,\n \"acc_norm\": 0.28991596638655465,\n \"acc_norm_stderr\": 0.029472485833136098\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.271523178807947,\n \"acc_stderr\": 0.03631329803969654,\n \"acc_norm\": 0.271523178807947,\n \"acc_norm_stderr\": 0.03631329803969654\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.3486238532110092,\n \"acc_stderr\": 0.020431254090714328,\n \"acc_norm\": 0.3486238532110092,\n \"acc_norm_stderr\": 0.020431254090714328\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4722222222222222,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\": 0.4722222222222222,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.24472573839662448,\n \"acc_stderr\": 0.027985699387036416,\n \"acc_norm\": 0.24472573839662448,\n \"acc_norm_stderr\": 0.027985699387036416\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.2914798206278027,\n \"acc_stderr\": 0.030500283176545923,\n \"acc_norm\": 0.2914798206278027,\n \"acc_norm_stderr\": 0.030500283176545923\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.26717557251908397,\n \"acc_stderr\": 0.038808483010823944,\n \"acc_norm\": 0.26717557251908397,\n \"acc_norm_stderr\": 0.038808483010823944\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.32231404958677684,\n \"acc_stderr\": 0.04266416363352168,\n \"acc_norm\": 0.32231404958677684,\n \"acc_norm_stderr\": 0.04266416363352168\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.21296296296296297,\n \"acc_stderr\": 0.03957835471980981,\n \"acc_norm\": 0.21296296296296297,\n \"acc_norm_stderr\": 0.03957835471980981\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.26380368098159507,\n \"acc_stderr\": 0.03462419931615623,\n \"acc_norm\": 0.26380368098159507,\n \"acc_norm_stderr\": 0.03462419931615623\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.25892857142857145,\n \"acc_stderr\": 0.041577515398656284,\n \"acc_norm\": 0.25892857142857145,\n \"acc_norm_stderr\": 0.041577515398656284\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.34951456310679613,\n \"acc_stderr\": 0.04721188506097173,\n \"acc_norm\": 0.34951456310679613,\n \"acc_norm_stderr\": 0.04721188506097173\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.1794871794871795,\n \"acc_stderr\": 0.025140935950335418,\n \"acc_norm\": 0.1794871794871795,\n \"acc_norm_stderr\": 0.025140935950335418\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.21583652618135377,\n \"acc_stderr\": 0.014711684386139958,\n \"acc_norm\": 0.21583652618135377,\n \"acc_norm_stderr\": 0.014711684386139958\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n 
\"acc\": 0.24277456647398843,\n \"acc_stderr\": 0.0230836585869842,\n \"acc_norm\": 0.24277456647398843,\n \"acc_norm_stderr\": 0.0230836585869842\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n \"acc_stderr\": 0.014333522059217889,\n \"acc_norm\": 0.2424581005586592,\n \"acc_norm_stderr\": 0.014333522059217889\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.21895424836601307,\n \"acc_stderr\": 0.02367908986180772,\n \"acc_norm\": 0.21895424836601307,\n \"acc_norm_stderr\": 0.02367908986180772\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.24758842443729903,\n \"acc_stderr\": 0.024513879973621967,\n \"acc_norm\": 0.24758842443729903,\n \"acc_norm_stderr\": 0.024513879973621967\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.22530864197530864,\n \"acc_stderr\": 0.023246202647819746,\n \"acc_norm\": 0.22530864197530864,\n \"acc_norm_stderr\": 0.023246202647819746\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.26595744680851063,\n \"acc_stderr\": 0.026358065698880592,\n \"acc_norm\": 0.26595744680851063,\n \"acc_norm_stderr\": 0.026358065698880592\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.44485294117647056,\n \"acc_stderr\": 0.030187532060329376,\n \"acc_norm\": 0.44485294117647056,\n \"acc_norm_stderr\": 0.030187532060329376\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.26143790849673204,\n \"acc_stderr\": 0.017776947157528034,\n \"acc_norm\": 0.26143790849673204,\n \"acc_norm_stderr\": 0.017776947157528034\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03955932861795833,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03955932861795833\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.031362502409358936,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.031362502409358936\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.22885572139303484,\n \"acc_stderr\": 0.029705284056772426,\n \"acc_norm\": 0.22885572139303484,\n \"acc_norm_stderr\": 0.029705284056772426\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.04461960433384739,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.04461960433384739\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.1927710843373494,\n \"acc_stderr\": 0.030709824050565274,\n \"acc_norm\": 0.1927710843373494,\n \"acc_norm_stderr\": 0.030709824050565274\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.21052631578947367,\n \"acc_stderr\": 0.0312678171466318,\n \"acc_norm\": 0.21052631578947367,\n \"acc_norm_stderr\": 0.0312678171466318\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.22766217870257038,\n \"mc1_stderr\": 0.01467925503211107,\n \"mc2\": 0.4069116400376613,\n \"mc2_stderr\": 0.014934250122346554\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5043409629044988,\n \"acc_stderr\": 0.014051956064076887\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.006823351023502654,\n \"acc_stderr\": 0.0022675371022544736\n }\n}\n```", "repo_url": "https://huggingface.co/gpt2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": 
[{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|arc:challenge|25_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|arc:challenge|25_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|arc:challenge|25_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|arc:challenge|25_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|arc:challenge|25_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|arc:challenge|25_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|arc:challenge|25_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_drop_0", "data_files": [{"split": "2023_09_14T13_54_21.687636", "path": ["**/details_harness|drop|0_2023-09-14T13-54-21.687636.parquet"]}, {"split": "2023_09_15T12_28_23.937147", "path": ["**/details_harness|drop|0_2023-09-15T12-28-23.937147.parquet"]}, {"split": "2023_09_15T12_47_31.231445", "path": ["**/details_harness|drop|0_2023-09-15T12-47-31.231445.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|0_2023-09-15T12-47-31.231445.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|drop|3_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_11_29T12_47_35.686694", "path": ["**/details_harness|drop|3_2023-11-29T12-47-35.686694.parquet"]}, {"split": "2023_11_29T12_58_42.860611", "path": ["**/details_harness|drop|3_2023-11-29T12-58-42.860611.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-11-29T12-58-42.860611.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|gsm8k|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_11_29T12_47_35.686694", "path": ["**/details_harness|gsm8k|5_2023-11-29T12-47-35.686694.parquet"]}, {"split": "2023_11_29T12_58_42.860611", "path": ["**/details_harness|gsm8k|5_2023-11-29T12-58-42.860611.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|gsm8k|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|gsm8k|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|gsm8k|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|gsm8k|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|gsm8k|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|gsm8k|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hellaswag|10_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hellaswag|10_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", 
"path": ["**/details_harness|hellaswag|10_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hellaswag|10_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hellaswag|10_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hellaswag|10_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hellaswag|10_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-21T18-07-07.067275.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-management|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-virology|5_2023-11-21T18-07-07.067275.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-16T13-32-55.332102.parquet", 
"**/details_harness|hendrycksTest-computer_security|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-16T13-32-55.332102.parquet", 
"**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-16T13-32-55.332102.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-19T14-19-42.718116.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-19T14-19-42.718116.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-12-23T15-28-59.872701.parquet", 
"**/details_harness|hendrycksTest-computer_security|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-management|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-12-23T15-28-59.872701.parquet", 
"**/details_harness|hendrycksTest-professional_medicine|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-virology|5_2023-12-23T15-28-59.872701.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T14-42-55.873500.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-10T14-42-55.873500.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-18T14-12-21.064569.parquet", 
"**/details_harness|hendrycksTest-computer_security|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-18T14-12-21.064569.parquet", 
"**/details_harness|hendrycksTest-professional_medicine|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-18T14-12-21.064569.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T13-56-20.291666.parquet", 
"**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-22T13-56-20.291666.parquet", 
"**/details_harness|hendrycksTest-computer_security|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-22T13-56-20.291666.parquet", 
"**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-22T13-56-20.291666.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": 
["**/details_harness|hendrycksTest-astronomy|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": 
["**/details_harness|hendrycksTest-college_biology|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": 
"2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-16T13-32-55.332102.parquet"]}, 
{"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T13-56-20.291666.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": 
["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": 
"2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": 
["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": 
"2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": 
"2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-management|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-management|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": 
["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": 
["**/details_harness|hendrycksTest-prehistory|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-16T13-32-55.332102.parquet"]}, 
{"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": 
["**/details_harness|hendrycksTest-security_studies|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-virology|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": 
["**/details_harness|hendrycksTest-virology|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-virology|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|truthfulqa:mc|0_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|truthfulqa:mc|0_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_06T15_19_52.414673", "path": ["**/details_harness|winogrande|5_2023-09-06T15-19-52.414673.parquet"]}, {"split": "2023_09_06T15_22_24.734466", "path": ["**/details_harness|winogrande|5_2023-09-06T15-22-24.734466.parquet"]}, {"split": "2023_09_06T15_24_04.768979", "path": ["**/details_harness|winogrande|5_2023-09-06T15-24-04.768979.parquet"]}, {"split": "2023_09_07T12_01_51.839651", "path": 
["**/details_harness|winogrande|5_2023-09-07T12-01-51.839651.parquet"]}, {"split": "2023_09_07T12_04_01.189528", "path": ["**/details_harness|winogrande|5_2023-09-07T12-04-01.189528.parquet"]}, {"split": "2023_09_07T12_08_17.821371", "path": ["**/details_harness|winogrande|5_2023-09-07T12-08-17.821371.parquet"]}, {"split": "2023_09_07T12_10_30.286469", "path": ["**/details_harness|winogrande|5_2023-09-07T12-10-30.286469.parquet"]}, {"split": "2023_11_21T18_07_07.067275", "path": ["**/details_harness|winogrande|5_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_11_29T12_47_35.686694", "path": ["**/details_harness|winogrande|5_2023-11-29T12-47-35.686694.parquet"]}, {"split": "2023_11_29T12_58_42.860611", "path": ["**/details_harness|winogrande|5_2023-11-29T12-58-42.860611.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["**/details_harness|winogrande|5_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", "path": ["**/details_harness|winogrande|5_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["**/details_harness|winogrande|5_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["**/details_harness|winogrande|5_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["**/details_harness|winogrande|5_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["**/details_harness|winogrande|5_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-22T13-56-20.291666.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_09_06T12_19_07.283399", "path": ["results_2023-09-06T12-19-07.283399.parquet"]}, {"split": "2023_09_06T12_21_24.071294", "path": ["results_2023-09-06T12-21-24.071294.parquet"]}, {"split": "2023_09_06T12_24_13.323279", "path": ["results_2023-09-06T12-24-13.323279.parquet"]}, {"split": "2023_09_06T13_26_17.619860", "path": ["results_2023-09-06T13-26-17.619860.parquet"]}, {"split": "2023_09_06T15_15_44.379880", "path": ["results_2023-09-06T15-15-44.379880.parquet"]}, {"split": "2023_09_06T15_19_52.414673", "path": ["results_2023-09-06T15-19-52.414673.parquet"]}, {"split": "2023_09_06T15_22_24.734466", "path": ["results_2023-09-06T15-22-24.734466.parquet"]}, {"split": "2023_09_06T15_24_04.768979", "path": ["results_2023-09-06T15-24-04.768979.parquet"]}, {"split": "2023_09_07T12_01_51.839651", "path": ["results_2023-09-07T12-01-51.839651.parquet"]}, {"split": "2023_09_07T12_04_01.189528", "path": ["results_2023-09-07T12-04-01.189528.parquet"]}, {"split": "2023_09_07T12_08_17.821371", "path": ["results_2023-09-07T12-08-17.821371.parquet"]}, {"split": "2023_09_07T12_10_30.286469", "path": ["results_2023-09-07T12-10-30.286469.parquet"]}, {"split": "2023_09_14T13_54_21.687636", "path": ["results_2023-09-14T13-54-21.687636.parquet"]}, {"split": "2023_09_15T12_28_23.937147", "path": ["results_2023-09-15T12-28-23.937147.parquet"]}, {"split": "2023_09_15T12_47_31.231445", "path": ["results_2023-09-15T12-47-31.231445.parquet"]}, {"split": "2023_11_21T18_07_07.067275", "path": ["results_2023-11-21T18-07-07.067275.parquet"]}, {"split": "2023_11_29T12_47_35.686694", "path": ["results_2023-11-29T12-47-35.686694.parquet"]}, {"split": "2023_11_29T12_58_42.860611", "path": ["results_2023-11-29T12-58-42.860611.parquet"]}, {"split": "2023_12_16T13_32_55.332102", "path": ["results_2023-12-16T13-32-55.332102.parquet"]}, {"split": "2023_12_19T14_19_42.718116", 
"path": ["results_2023-12-19T14-19-42.718116.parquet"]}, {"split": "2023_12_23T15_28_59.872701", "path": ["results_2023-12-23T15-28-59.872701.parquet"]}, {"split": "2024_01_10T14_42_55.873500", "path": ["results_2024-01-10T14-42-55.873500.parquet"]}, {"split": "2024_01_18T14_12_21.064569", "path": ["results_2024-01-18T14-12-21.064569.parquet"]}, {"split": "2024_01_22T13_56_20.291666", "path": ["results_2024-01-22T13-56-20.291666.parquet"]}, {"split": "latest", "path": ["results_2024-01-22T13-56-20.291666.parquet"]}]}]}
2024-01-22T13:57:43+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of gpt2 Dataset automatically created during the evaluation run of model gpt2 on the Open LLM Leaderboard. The dataset is composed of 65 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 24 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-22T13:56:20.291666 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
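The card promises a loading snippet ("you can for instance do the following") but none survived in this dump, so here is a minimal sketch using the `datasets` library; the repository id `open-llm-leaderboard/details_gpt2` and the config name `harness_winogrande_5` are assumptions inferred from the parquet paths above, not confirmed by the card:

```python
from datasets import load_dataset

# Assumed repo id and config name, inferred from the parquet file paths above.
# The "latest" split always points at the most recent evaluation run.
data = load_dataset(
    "open-llm-leaderboard/details_gpt2",  # assumption: typical leaderboard naming
    "harness_winogrande_5",               # assumption: one config per evaluated task
    split="latest",
)
print(data[0])
```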
[ "# Dataset Card for Evaluation run of gpt2\n\n\n\nDataset automatically created during the evaluation run of model gpt2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 65 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 24 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T13:56:20.291666(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of gpt2\n\n\n\nDataset automatically created during the evaluation run of model gpt2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 65 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 24 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-22T13:56:20.291666(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 167, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of gpt2\n\n\n\nDataset automatically created during the evaluation run of model gpt2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 65 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 24 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-22T13:56:20.291666(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
47694ec29614e359ece239035066ec22ac0fb91f
# Dataset Card for "BGL_DistilRoBERTa_Baseline" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
EgilKarlsen/BGL_DistilRoBERTa_Baseline
[ "region:us" ]
2023-08-18T13:42:31+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "0", "dtype": "float32"}, {"name": "1", "dtype": "float32"}, {"name": "2", "dtype": "float32"}, {"name": "3", "dtype": "float32"}, {"name": "4", "dtype": "float32"}, {"name": "5", "dtype": "float32"}, {"name": "6", "dtype": "float32"}, {"name": "7", "dtype": "float32"}, {"name": "8", "dtype": "float32"}, {"name": "9", "dtype": "float32"}, {"name": "10", "dtype": "float32"}, {"name": "11", "dtype": "float32"}, {"name": "12", "dtype": "float32"}, {"name": "13", "dtype": "float32"}, {"name": "14", "dtype": "float32"}, {"name": "15", "dtype": "float32"}, {"name": "16", "dtype": "float32"}, {"name": "17", "dtype": "float32"}, {"name": "18", "dtype": "float32"}, {"name": "19", "dtype": "float32"}, {"name": "20", "dtype": "float32"}, {"name": "21", "dtype": "float32"}, {"name": "22", "dtype": "float32"}, {"name": "23", "dtype": "float32"}, {"name": "24", "dtype": "float32"}, {"name": "25", "dtype": "float32"}, {"name": "26", "dtype": "float32"}, {"name": "27", "dtype": "float32"}, {"name": "28", "dtype": "float32"}, {"name": "29", "dtype": "float32"}, {"name": "30", "dtype": "float32"}, {"name": "31", "dtype": "float32"}, {"name": "32", "dtype": "float32"}, {"name": "33", "dtype": "float32"}, {"name": "34", "dtype": "float32"}, {"name": "35", "dtype": "float32"}, {"name": "36", "dtype": "float32"}, {"name": "37", "dtype": "float32"}, {"name": "38", "dtype": "float32"}, {"name": "39", "dtype": "float32"}, {"name": "40", "dtype": "float32"}, {"name": "41", "dtype": "float32"}, {"name": "42", "dtype": "float32"}, {"name": "43", "dtype": "float32"}, {"name": "44", "dtype": "float32"}, {"name": "45", "dtype": "float32"}, {"name": "46", "dtype": "float32"}, {"name": "47", "dtype": "float32"}, {"name": "48", "dtype": "float32"}, {"name": "49", "dtype": "float32"}, {"name": "50", "dtype": "float32"}, {"name": "51", "dtype": "float32"}, {"name": "52", "dtype": "float32"}, {"name": "53", "dtype": "float32"}, {"name": "54", "dtype": "float32"}, {"name": "55", "dtype": "float32"}, {"name": "56", "dtype": "float32"}, {"name": "57", "dtype": "float32"}, {"name": "58", "dtype": "float32"}, {"name": "59", "dtype": "float32"}, {"name": "60", "dtype": "float32"}, {"name": "61", "dtype": "float32"}, {"name": "62", "dtype": "float32"}, {"name": "63", "dtype": "float32"}, {"name": "64", "dtype": "float32"}, {"name": "65", "dtype": "float32"}, {"name": "66", "dtype": "float32"}, {"name": "67", "dtype": "float32"}, {"name": "68", "dtype": "float32"}, {"name": "69", "dtype": "float32"}, {"name": "70", "dtype": "float32"}, {"name": "71", "dtype": "float32"}, {"name": "72", "dtype": "float32"}, {"name": "73", "dtype": "float32"}, {"name": "74", "dtype": "float32"}, {"name": "75", "dtype": "float32"}, {"name": "76", "dtype": "float32"}, {"name": "77", "dtype": "float32"}, {"name": "78", "dtype": "float32"}, {"name": "79", "dtype": "float32"}, {"name": "80", "dtype": "float32"}, {"name": "81", "dtype": "float32"}, {"name": "82", "dtype": "float32"}, {"name": "83", "dtype": "float32"}, {"name": "84", "dtype": "float32"}, {"name": "85", "dtype": "float32"}, {"name": "86", "dtype": "float32"}, {"name": "87", "dtype": "float32"}, {"name": "88", "dtype": "float32"}, {"name": "89", "dtype": "float32"}, {"name": "90", "dtype": "float32"}, {"name": "91", "dtype": "float32"}, {"name": "92", "dtype": "float32"}, {"name": "93", "dtype": "float32"}, 
{"name": "94", "dtype": "float32"}, {"name": "95", "dtype": "float32"}, {"name": "96", "dtype": "float32"}, {"name": "97", "dtype": "float32"}, {"name": "98", "dtype": "float32"}, {"name": "99", "dtype": "float32"}, {"name": "100", "dtype": "float32"}, {"name": "101", "dtype": "float32"}, {"name": "102", "dtype": "float32"}, {"name": "103", "dtype": "float32"}, {"name": "104", "dtype": "float32"}, {"name": "105", "dtype": "float32"}, {"name": "106", "dtype": "float32"}, {"name": "107", "dtype": "float32"}, {"name": "108", "dtype": "float32"}, {"name": "109", "dtype": "float32"}, {"name": "110", "dtype": "float32"}, {"name": "111", "dtype": "float32"}, {"name": "112", "dtype": "float32"}, {"name": "113", "dtype": "float32"}, {"name": "114", "dtype": "float32"}, {"name": "115", "dtype": "float32"}, {"name": "116", "dtype": "float32"}, {"name": "117", "dtype": "float32"}, {"name": "118", "dtype": "float32"}, {"name": "119", "dtype": "float32"}, {"name": "120", "dtype": "float32"}, {"name": "121", "dtype": "float32"}, {"name": "122", "dtype": "float32"}, {"name": "123", "dtype": "float32"}, {"name": "124", "dtype": "float32"}, {"name": "125", "dtype": "float32"}, {"name": "126", "dtype": "float32"}, {"name": "127", "dtype": "float32"}, {"name": "128", "dtype": "float32"}, {"name": "129", "dtype": "float32"}, {"name": "130", "dtype": "float32"}, {"name": "131", "dtype": "float32"}, {"name": "132", "dtype": "float32"}, {"name": "133", "dtype": "float32"}, {"name": "134", "dtype": "float32"}, {"name": "135", "dtype": "float32"}, {"name": "136", "dtype": "float32"}, {"name": "137", "dtype": "float32"}, {"name": "138", "dtype": "float32"}, {"name": "139", "dtype": "float32"}, {"name": "140", "dtype": "float32"}, {"name": "141", "dtype": "float32"}, {"name": "142", "dtype": "float32"}, {"name": "143", "dtype": "float32"}, {"name": "144", "dtype": "float32"}, {"name": "145", "dtype": "float32"}, {"name": "146", "dtype": "float32"}, {"name": "147", "dtype": "float32"}, {"name": "148", "dtype": "float32"}, {"name": "149", "dtype": "float32"}, {"name": "150", "dtype": "float32"}, {"name": "151", "dtype": "float32"}, {"name": "152", "dtype": "float32"}, {"name": "153", "dtype": "float32"}, {"name": "154", "dtype": "float32"}, {"name": "155", "dtype": "float32"}, {"name": "156", "dtype": "float32"}, {"name": "157", "dtype": "float32"}, {"name": "158", "dtype": "float32"}, {"name": "159", "dtype": "float32"}, {"name": "160", "dtype": "float32"}, {"name": "161", "dtype": "float32"}, {"name": "162", "dtype": "float32"}, {"name": "163", "dtype": "float32"}, {"name": "164", "dtype": "float32"}, {"name": "165", "dtype": "float32"}, {"name": "166", "dtype": "float32"}, {"name": "167", "dtype": "float32"}, {"name": "168", "dtype": "float32"}, {"name": "169", "dtype": "float32"}, {"name": "170", "dtype": "float32"}, {"name": "171", "dtype": "float32"}, {"name": "172", "dtype": "float32"}, {"name": "173", "dtype": "float32"}, {"name": "174", "dtype": "float32"}, {"name": "175", "dtype": "float32"}, {"name": "176", "dtype": "float32"}, {"name": "177", "dtype": "float32"}, {"name": "178", "dtype": "float32"}, {"name": "179", "dtype": "float32"}, {"name": "180", "dtype": "float32"}, {"name": "181", "dtype": "float32"}, {"name": "182", "dtype": "float32"}, {"name": "183", "dtype": "float32"}, {"name": "184", "dtype": "float32"}, {"name": "185", "dtype": "float32"}, {"name": "186", "dtype": "float32"}, {"name": "187", "dtype": "float32"}, {"name": "188", "dtype": "float32"}, {"name": "189", "dtype": "float32"}, {"name": 
"190", "dtype": "float32"}, {"name": "191", "dtype": "float32"}, {"name": "192", "dtype": "float32"}, {"name": "193", "dtype": "float32"}, {"name": "194", "dtype": "float32"}, {"name": "195", "dtype": "float32"}, {"name": "196", "dtype": "float32"}, {"name": "197", "dtype": "float32"}, {"name": "198", "dtype": "float32"}, {"name": "199", "dtype": "float32"}, {"name": "200", "dtype": "float32"}, {"name": "201", "dtype": "float32"}, {"name": "202", "dtype": "float32"}, {"name": "203", "dtype": "float32"}, {"name": "204", "dtype": "float32"}, {"name": "205", "dtype": "float32"}, {"name": "206", "dtype": "float32"}, {"name": "207", "dtype": "float32"}, {"name": "208", "dtype": "float32"}, {"name": "209", "dtype": "float32"}, {"name": "210", "dtype": "float32"}, {"name": "211", "dtype": "float32"}, {"name": "212", "dtype": "float32"}, {"name": "213", "dtype": "float32"}, {"name": "214", "dtype": "float32"}, {"name": "215", "dtype": "float32"}, {"name": "216", "dtype": "float32"}, {"name": "217", "dtype": "float32"}, {"name": "218", "dtype": "float32"}, {"name": "219", "dtype": "float32"}, {"name": "220", "dtype": "float32"}, {"name": "221", "dtype": "float32"}, {"name": "222", "dtype": "float32"}, {"name": "223", "dtype": "float32"}, {"name": "224", "dtype": "float32"}, {"name": "225", "dtype": "float32"}, {"name": "226", "dtype": "float32"}, {"name": "227", "dtype": "float32"}, {"name": "228", "dtype": "float32"}, {"name": "229", "dtype": "float32"}, {"name": "230", "dtype": "float32"}, {"name": "231", "dtype": "float32"}, {"name": "232", "dtype": "float32"}, {"name": "233", "dtype": "float32"}, {"name": "234", "dtype": "float32"}, {"name": "235", "dtype": "float32"}, {"name": "236", "dtype": "float32"}, {"name": "237", "dtype": "float32"}, {"name": "238", "dtype": "float32"}, {"name": "239", "dtype": "float32"}, {"name": "240", "dtype": "float32"}, {"name": "241", "dtype": "float32"}, {"name": "242", "dtype": "float32"}, {"name": "243", "dtype": "float32"}, {"name": "244", "dtype": "float32"}, {"name": "245", "dtype": "float32"}, {"name": "246", "dtype": "float32"}, {"name": "247", "dtype": "float32"}, {"name": "248", "dtype": "float32"}, {"name": "249", "dtype": "float32"}, {"name": "250", "dtype": "float32"}, {"name": "251", "dtype": "float32"}, {"name": "252", "dtype": "float32"}, {"name": "253", "dtype": "float32"}, {"name": "254", "dtype": "float32"}, {"name": "255", "dtype": "float32"}, {"name": "256", "dtype": "float32"}, {"name": "257", "dtype": "float32"}, {"name": "258", "dtype": "float32"}, {"name": "259", "dtype": "float32"}, {"name": "260", "dtype": "float32"}, {"name": "261", "dtype": "float32"}, {"name": "262", "dtype": "float32"}, {"name": "263", "dtype": "float32"}, {"name": "264", "dtype": "float32"}, {"name": "265", "dtype": "float32"}, {"name": "266", "dtype": "float32"}, {"name": "267", "dtype": "float32"}, {"name": "268", "dtype": "float32"}, {"name": "269", "dtype": "float32"}, {"name": "270", "dtype": "float32"}, {"name": "271", "dtype": "float32"}, {"name": "272", "dtype": "float32"}, {"name": "273", "dtype": "float32"}, {"name": "274", "dtype": "float32"}, {"name": "275", "dtype": "float32"}, {"name": "276", "dtype": "float32"}, {"name": "277", "dtype": "float32"}, {"name": "278", "dtype": "float32"}, {"name": "279", "dtype": "float32"}, {"name": "280", "dtype": "float32"}, {"name": "281", "dtype": "float32"}, {"name": "282", "dtype": "float32"}, {"name": "283", "dtype": "float32"}, {"name": "284", "dtype": "float32"}, {"name": "285", "dtype": "float32"}, {"name": 
"286", "dtype": "float32"}, {"name": "287", "dtype": "float32"}, {"name": "288", "dtype": "float32"}, {"name": "289", "dtype": "float32"}, {"name": "290", "dtype": "float32"}, {"name": "291", "dtype": "float32"}, {"name": "292", "dtype": "float32"}, {"name": "293", "dtype": "float32"}, {"name": "294", "dtype": "float32"}, {"name": "295", "dtype": "float32"}, {"name": "296", "dtype": "float32"}, {"name": "297", "dtype": "float32"}, {"name": "298", "dtype": "float32"}, {"name": "299", "dtype": "float32"}, {"name": "300", "dtype": "float32"}, {"name": "301", "dtype": "float32"}, {"name": "302", "dtype": "float32"}, {"name": "303", "dtype": "float32"}, {"name": "304", "dtype": "float32"}, {"name": "305", "dtype": "float32"}, {"name": "306", "dtype": "float32"}, {"name": "307", "dtype": "float32"}, {"name": "308", "dtype": "float32"}, {"name": "309", "dtype": "float32"}, {"name": "310", "dtype": "float32"}, {"name": "311", "dtype": "float32"}, {"name": "312", "dtype": "float32"}, {"name": "313", "dtype": "float32"}, {"name": "314", "dtype": "float32"}, {"name": "315", "dtype": "float32"}, {"name": "316", "dtype": "float32"}, {"name": "317", "dtype": "float32"}, {"name": "318", "dtype": "float32"}, {"name": "319", "dtype": "float32"}, {"name": "320", "dtype": "float32"}, {"name": "321", "dtype": "float32"}, {"name": "322", "dtype": "float32"}, {"name": "323", "dtype": "float32"}, {"name": "324", "dtype": "float32"}, {"name": "325", "dtype": "float32"}, {"name": "326", "dtype": "float32"}, {"name": "327", "dtype": "float32"}, {"name": "328", "dtype": "float32"}, {"name": "329", "dtype": "float32"}, {"name": "330", "dtype": "float32"}, {"name": "331", "dtype": "float32"}, {"name": "332", "dtype": "float32"}, {"name": "333", "dtype": "float32"}, {"name": "334", "dtype": "float32"}, {"name": "335", "dtype": "float32"}, {"name": "336", "dtype": "float32"}, {"name": "337", "dtype": "float32"}, {"name": "338", "dtype": "float32"}, {"name": "339", "dtype": "float32"}, {"name": "340", "dtype": "float32"}, {"name": "341", "dtype": "float32"}, {"name": "342", "dtype": "float32"}, {"name": "343", "dtype": "float32"}, {"name": "344", "dtype": "float32"}, {"name": "345", "dtype": "float32"}, {"name": "346", "dtype": "float32"}, {"name": "347", "dtype": "float32"}, {"name": "348", "dtype": "float32"}, {"name": "349", "dtype": "float32"}, {"name": "350", "dtype": "float32"}, {"name": "351", "dtype": "float32"}, {"name": "352", "dtype": "float32"}, {"name": "353", "dtype": "float32"}, {"name": "354", "dtype": "float32"}, {"name": "355", "dtype": "float32"}, {"name": "356", "dtype": "float32"}, {"name": "357", "dtype": "float32"}, {"name": "358", "dtype": "float32"}, {"name": "359", "dtype": "float32"}, {"name": "360", "dtype": "float32"}, {"name": "361", "dtype": "float32"}, {"name": "362", "dtype": "float32"}, {"name": "363", "dtype": "float32"}, {"name": "364", "dtype": "float32"}, {"name": "365", "dtype": "float32"}, {"name": "366", "dtype": "float32"}, {"name": "367", "dtype": "float32"}, {"name": "368", "dtype": "float32"}, {"name": "369", "dtype": "float32"}, {"name": "370", "dtype": "float32"}, {"name": "371", "dtype": "float32"}, {"name": "372", "dtype": "float32"}, {"name": "373", "dtype": "float32"}, {"name": "374", "dtype": "float32"}, {"name": "375", "dtype": "float32"}, {"name": "376", "dtype": "float32"}, {"name": "377", "dtype": "float32"}, {"name": "378", "dtype": "float32"}, {"name": "379", "dtype": "float32"}, {"name": "380", "dtype": "float32"}, {"name": "381", "dtype": "float32"}, {"name": 
"382", "dtype": "float32"}, {"name": "383", "dtype": "float32"}, {"name": "384", "dtype": "float32"}, {"name": "385", "dtype": "float32"}, {"name": "386", "dtype": "float32"}, {"name": "387", "dtype": "float32"}, {"name": "388", "dtype": "float32"}, {"name": "389", "dtype": "float32"}, {"name": "390", "dtype": "float32"}, {"name": "391", "dtype": "float32"}, {"name": "392", "dtype": "float32"}, {"name": "393", "dtype": "float32"}, {"name": "394", "dtype": "float32"}, {"name": "395", "dtype": "float32"}, {"name": "396", "dtype": "float32"}, {"name": "397", "dtype": "float32"}, {"name": "398", "dtype": "float32"}, {"name": "399", "dtype": "float32"}, {"name": "400", "dtype": "float32"}, {"name": "401", "dtype": "float32"}, {"name": "402", "dtype": "float32"}, {"name": "403", "dtype": "float32"}, {"name": "404", "dtype": "float32"}, {"name": "405", "dtype": "float32"}, {"name": "406", "dtype": "float32"}, {"name": "407", "dtype": "float32"}, {"name": "408", "dtype": "float32"}, {"name": "409", "dtype": "float32"}, {"name": "410", "dtype": "float32"}, {"name": "411", "dtype": "float32"}, {"name": "412", "dtype": "float32"}, {"name": "413", "dtype": "float32"}, {"name": "414", "dtype": "float32"}, {"name": "415", "dtype": "float32"}, {"name": "416", "dtype": "float32"}, {"name": "417", "dtype": "float32"}, {"name": "418", "dtype": "float32"}, {"name": "419", "dtype": "float32"}, {"name": "420", "dtype": "float32"}, {"name": "421", "dtype": "float32"}, {"name": "422", "dtype": "float32"}, {"name": "423", "dtype": "float32"}, {"name": "424", "dtype": "float32"}, {"name": "425", "dtype": "float32"}, {"name": "426", "dtype": "float32"}, {"name": "427", "dtype": "float32"}, {"name": "428", "dtype": "float32"}, {"name": "429", "dtype": "float32"}, {"name": "430", "dtype": "float32"}, {"name": "431", "dtype": "float32"}, {"name": "432", "dtype": "float32"}, {"name": "433", "dtype": "float32"}, {"name": "434", "dtype": "float32"}, {"name": "435", "dtype": "float32"}, {"name": "436", "dtype": "float32"}, {"name": "437", "dtype": "float32"}, {"name": "438", "dtype": "float32"}, {"name": "439", "dtype": "float32"}, {"name": "440", "dtype": "float32"}, {"name": "441", "dtype": "float32"}, {"name": "442", "dtype": "float32"}, {"name": "443", "dtype": "float32"}, {"name": "444", "dtype": "float32"}, {"name": "445", "dtype": "float32"}, {"name": "446", "dtype": "float32"}, {"name": "447", "dtype": "float32"}, {"name": "448", "dtype": "float32"}, {"name": "449", "dtype": "float32"}, {"name": "450", "dtype": "float32"}, {"name": "451", "dtype": "float32"}, {"name": "452", "dtype": "float32"}, {"name": "453", "dtype": "float32"}, {"name": "454", "dtype": "float32"}, {"name": "455", "dtype": "float32"}, {"name": "456", "dtype": "float32"}, {"name": "457", "dtype": "float32"}, {"name": "458", "dtype": "float32"}, {"name": "459", "dtype": "float32"}, {"name": "460", "dtype": "float32"}, {"name": "461", "dtype": "float32"}, {"name": "462", "dtype": "float32"}, {"name": "463", "dtype": "float32"}, {"name": "464", "dtype": "float32"}, {"name": "465", "dtype": "float32"}, {"name": "466", "dtype": "float32"}, {"name": "467", "dtype": "float32"}, {"name": "468", "dtype": "float32"}, {"name": "469", "dtype": "float32"}, {"name": "470", "dtype": "float32"}, {"name": "471", "dtype": "float32"}, {"name": "472", "dtype": "float32"}, {"name": "473", "dtype": "float32"}, {"name": "474", "dtype": "float32"}, {"name": "475", "dtype": "float32"}, {"name": "476", "dtype": "float32"}, {"name": "477", "dtype": "float32"}, {"name": 
"478", "dtype": "float32"}, {"name": "479", "dtype": "float32"}, {"name": "480", "dtype": "float32"}, {"name": "481", "dtype": "float32"}, {"name": "482", "dtype": "float32"}, {"name": "483", "dtype": "float32"}, {"name": "484", "dtype": "float32"}, {"name": "485", "dtype": "float32"}, {"name": "486", "dtype": "float32"}, {"name": "487", "dtype": "float32"}, {"name": "488", "dtype": "float32"}, {"name": "489", "dtype": "float32"}, {"name": "490", "dtype": "float32"}, {"name": "491", "dtype": "float32"}, {"name": "492", "dtype": "float32"}, {"name": "493", "dtype": "float32"}, {"name": "494", "dtype": "float32"}, {"name": "495", "dtype": "float32"}, {"name": "496", "dtype": "float32"}, {"name": "497", "dtype": "float32"}, {"name": "498", "dtype": "float32"}, {"name": "499", "dtype": "float32"}, {"name": "500", "dtype": "float32"}, {"name": "501", "dtype": "float32"}, {"name": "502", "dtype": "float32"}, {"name": "503", "dtype": "float32"}, {"name": "504", "dtype": "float32"}, {"name": "505", "dtype": "float32"}, {"name": "506", "dtype": "float32"}, {"name": "507", "dtype": "float32"}, {"name": "508", "dtype": "float32"}, {"name": "509", "dtype": "float32"}, {"name": "510", "dtype": "float32"}, {"name": "511", "dtype": "float32"}, {"name": "512", "dtype": "float32"}, {"name": "513", "dtype": "float32"}, {"name": "514", "dtype": "float32"}, {"name": "515", "dtype": "float32"}, {"name": "516", "dtype": "float32"}, {"name": "517", "dtype": "float32"}, {"name": "518", "dtype": "float32"}, {"name": "519", "dtype": "float32"}, {"name": "520", "dtype": "float32"}, {"name": "521", "dtype": "float32"}, {"name": "522", "dtype": "float32"}, {"name": "523", "dtype": "float32"}, {"name": "524", "dtype": "float32"}, {"name": "525", "dtype": "float32"}, {"name": "526", "dtype": "float32"}, {"name": "527", "dtype": "float32"}, {"name": "528", "dtype": "float32"}, {"name": "529", "dtype": "float32"}, {"name": "530", "dtype": "float32"}, {"name": "531", "dtype": "float32"}, {"name": "532", "dtype": "float32"}, {"name": "533", "dtype": "float32"}, {"name": "534", "dtype": "float32"}, {"name": "535", "dtype": "float32"}, {"name": "536", "dtype": "float32"}, {"name": "537", "dtype": "float32"}, {"name": "538", "dtype": "float32"}, {"name": "539", "dtype": "float32"}, {"name": "540", "dtype": "float32"}, {"name": "541", "dtype": "float32"}, {"name": "542", "dtype": "float32"}, {"name": "543", "dtype": "float32"}, {"name": "544", "dtype": "float32"}, {"name": "545", "dtype": "float32"}, {"name": "546", "dtype": "float32"}, {"name": "547", "dtype": "float32"}, {"name": "548", "dtype": "float32"}, {"name": "549", "dtype": "float32"}, {"name": "550", "dtype": "float32"}, {"name": "551", "dtype": "float32"}, {"name": "552", "dtype": "float32"}, {"name": "553", "dtype": "float32"}, {"name": "554", "dtype": "float32"}, {"name": "555", "dtype": "float32"}, {"name": "556", "dtype": "float32"}, {"name": "557", "dtype": "float32"}, {"name": "558", "dtype": "float32"}, {"name": "559", "dtype": "float32"}, {"name": "560", "dtype": "float32"}, {"name": "561", "dtype": "float32"}, {"name": "562", "dtype": "float32"}, {"name": "563", "dtype": "float32"}, {"name": "564", "dtype": "float32"}, {"name": "565", "dtype": "float32"}, {"name": "566", "dtype": "float32"}, {"name": "567", "dtype": "float32"}, {"name": "568", "dtype": "float32"}, {"name": "569", "dtype": "float32"}, {"name": "570", "dtype": "float32"}, {"name": "571", "dtype": "float32"}, {"name": "572", "dtype": "float32"}, {"name": "573", "dtype": "float32"}, {"name": 
"574", "dtype": "float32"}, {"name": "575", "dtype": "float32"}, {"name": "576", "dtype": "float32"}, {"name": "577", "dtype": "float32"}, {"name": "578", "dtype": "float32"}, {"name": "579", "dtype": "float32"}, {"name": "580", "dtype": "float32"}, {"name": "581", "dtype": "float32"}, {"name": "582", "dtype": "float32"}, {"name": "583", "dtype": "float32"}, {"name": "584", "dtype": "float32"}, {"name": "585", "dtype": "float32"}, {"name": "586", "dtype": "float32"}, {"name": "587", "dtype": "float32"}, {"name": "588", "dtype": "float32"}, {"name": "589", "dtype": "float32"}, {"name": "590", "dtype": "float32"}, {"name": "591", "dtype": "float32"}, {"name": "592", "dtype": "float32"}, {"name": "593", "dtype": "float32"}, {"name": "594", "dtype": "float32"}, {"name": "595", "dtype": "float32"}, {"name": "596", "dtype": "float32"}, {"name": "597", "dtype": "float32"}, {"name": "598", "dtype": "float32"}, {"name": "599", "dtype": "float32"}, {"name": "600", "dtype": "float32"}, {"name": "601", "dtype": "float32"}, {"name": "602", "dtype": "float32"}, {"name": "603", "dtype": "float32"}, {"name": "604", "dtype": "float32"}, {"name": "605", "dtype": "float32"}, {"name": "606", "dtype": "float32"}, {"name": "607", "dtype": "float32"}, {"name": "608", "dtype": "float32"}, {"name": "609", "dtype": "float32"}, {"name": "610", "dtype": "float32"}, {"name": "611", "dtype": "float32"}, {"name": "612", "dtype": "float32"}, {"name": "613", "dtype": "float32"}, {"name": "614", "dtype": "float32"}, {"name": "615", "dtype": "float32"}, {"name": "616", "dtype": "float32"}, {"name": "617", "dtype": "float32"}, {"name": "618", "dtype": "float32"}, {"name": "619", "dtype": "float32"}, {"name": "620", "dtype": "float32"}, {"name": "621", "dtype": "float32"}, {"name": "622", "dtype": "float32"}, {"name": "623", "dtype": "float32"}, {"name": "624", "dtype": "float32"}, {"name": "625", "dtype": "float32"}, {"name": "626", "dtype": "float32"}, {"name": "627", "dtype": "float32"}, {"name": "628", "dtype": "float32"}, {"name": "629", "dtype": "float32"}, {"name": "630", "dtype": "float32"}, {"name": "631", "dtype": "float32"}, {"name": "632", "dtype": "float32"}, {"name": "633", "dtype": "float32"}, {"name": "634", "dtype": "float32"}, {"name": "635", "dtype": "float32"}, {"name": "636", "dtype": "float32"}, {"name": "637", "dtype": "float32"}, {"name": "638", "dtype": "float32"}, {"name": "639", "dtype": "float32"}, {"name": "640", "dtype": "float32"}, {"name": "641", "dtype": "float32"}, {"name": "642", "dtype": "float32"}, {"name": "643", "dtype": "float32"}, {"name": "644", "dtype": "float32"}, {"name": "645", "dtype": "float32"}, {"name": "646", "dtype": "float32"}, {"name": "647", "dtype": "float32"}, {"name": "648", "dtype": "float32"}, {"name": "649", "dtype": "float32"}, {"name": "650", "dtype": "float32"}, {"name": "651", "dtype": "float32"}, {"name": "652", "dtype": "float32"}, {"name": "653", "dtype": "float32"}, {"name": "654", "dtype": "float32"}, {"name": "655", "dtype": "float32"}, {"name": "656", "dtype": "float32"}, {"name": "657", "dtype": "float32"}, {"name": "658", "dtype": "float32"}, {"name": "659", "dtype": "float32"}, {"name": "660", "dtype": "float32"}, {"name": "661", "dtype": "float32"}, {"name": "662", "dtype": "float32"}, {"name": "663", "dtype": "float32"}, {"name": "664", "dtype": "float32"}, {"name": "665", "dtype": "float32"}, {"name": "666", "dtype": "float32"}, {"name": "667", "dtype": "float32"}, {"name": "668", "dtype": "float32"}, {"name": "669", "dtype": "float32"}, {"name": 
"670", "dtype": "float32"}, {"name": "671", "dtype": "float32"}, {"name": "672", "dtype": "float32"}, {"name": "673", "dtype": "float32"}, {"name": "674", "dtype": "float32"}, {"name": "675", "dtype": "float32"}, {"name": "676", "dtype": "float32"}, {"name": "677", "dtype": "float32"}, {"name": "678", "dtype": "float32"}, {"name": "679", "dtype": "float32"}, {"name": "680", "dtype": "float32"}, {"name": "681", "dtype": "float32"}, {"name": "682", "dtype": "float32"}, {"name": "683", "dtype": "float32"}, {"name": "684", "dtype": "float32"}, {"name": "685", "dtype": "float32"}, {"name": "686", "dtype": "float32"}, {"name": "687", "dtype": "float32"}, {"name": "688", "dtype": "float32"}, {"name": "689", "dtype": "float32"}, {"name": "690", "dtype": "float32"}, {"name": "691", "dtype": "float32"}, {"name": "692", "dtype": "float32"}, {"name": "693", "dtype": "float32"}, {"name": "694", "dtype": "float32"}, {"name": "695", "dtype": "float32"}, {"name": "696", "dtype": "float32"}, {"name": "697", "dtype": "float32"}, {"name": "698", "dtype": "float32"}, {"name": "699", "dtype": "float32"}, {"name": "700", "dtype": "float32"}, {"name": "701", "dtype": "float32"}, {"name": "702", "dtype": "float32"}, {"name": "703", "dtype": "float32"}, {"name": "704", "dtype": "float32"}, {"name": "705", "dtype": "float32"}, {"name": "706", "dtype": "float32"}, {"name": "707", "dtype": "float32"}, {"name": "708", "dtype": "float32"}, {"name": "709", "dtype": "float32"}, {"name": "710", "dtype": "float32"}, {"name": "711", "dtype": "float32"}, {"name": "712", "dtype": "float32"}, {"name": "713", "dtype": "float32"}, {"name": "714", "dtype": "float32"}, {"name": "715", "dtype": "float32"}, {"name": "716", "dtype": "float32"}, {"name": "717", "dtype": "float32"}, {"name": "718", "dtype": "float32"}, {"name": "719", "dtype": "float32"}, {"name": "720", "dtype": "float32"}, {"name": "721", "dtype": "float32"}, {"name": "722", "dtype": "float32"}, {"name": "723", "dtype": "float32"}, {"name": "724", "dtype": "float32"}, {"name": "725", "dtype": "float32"}, {"name": "726", "dtype": "float32"}, {"name": "727", "dtype": "float32"}, {"name": "728", "dtype": "float32"}, {"name": "729", "dtype": "float32"}, {"name": "730", "dtype": "float32"}, {"name": "731", "dtype": "float32"}, {"name": "732", "dtype": "float32"}, {"name": "733", "dtype": "float32"}, {"name": "734", "dtype": "float32"}, {"name": "735", "dtype": "float32"}, {"name": "736", "dtype": "float32"}, {"name": "737", "dtype": "float32"}, {"name": "738", "dtype": "float32"}, {"name": "739", "dtype": "float32"}, {"name": "740", "dtype": "float32"}, {"name": "741", "dtype": "float32"}, {"name": "742", "dtype": "float32"}, {"name": "743", "dtype": "float32"}, {"name": "744", "dtype": "float32"}, {"name": "745", "dtype": "float32"}, {"name": "746", "dtype": "float32"}, {"name": "747", "dtype": "float32"}, {"name": "748", "dtype": "float32"}, {"name": "749", "dtype": "float32"}, {"name": "750", "dtype": "float32"}, {"name": "751", "dtype": "float32"}, {"name": "752", "dtype": "float32"}, {"name": "753", "dtype": "float32"}, {"name": "754", "dtype": "float32"}, {"name": "755", "dtype": "float32"}, {"name": "756", "dtype": "float32"}, {"name": "757", "dtype": "float32"}, {"name": "758", "dtype": "float32"}, {"name": "759", "dtype": "float32"}, {"name": "760", "dtype": "float32"}, {"name": "761", "dtype": "float32"}, {"name": "762", "dtype": "float32"}, {"name": "763", "dtype": "float32"}, {"name": "764", "dtype": "float32"}, {"name": "765", "dtype": "float32"}, {"name": 
"766", "dtype": "float32"}, {"name": "767", "dtype": "float32"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 115582709.0625, "num_examples": 37500}, {"name": "test", "num_bytes": 38527570.0, "num_examples": 12500}], "download_size": 211881627, "dataset_size": 154110279.0625}}
2023-08-18T14:20:07+00:00
[]
[]
TAGS #region-us
# Dataset Card for "BGL_DistilRoBERTa_Baseline" More Information needed
[ "# Dataset Card for \"BGL_DistilRoBERTa_Baseline\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"BGL_DistilRoBERTa_Baseline\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"BGL_DistilRoBERTa_Baseline\"\n\nMore Information needed" ]
2a1e8d71c796ff918537c632ff0c16abfe7b8b48
# Dataset Card for "omni3d_v2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ZiAngGu/omni3d_v2
[ "region:us" ]
2023-08-18T13:46:26+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "conditioning_image", "dtype": "image"}, {"name": "text", "dtype": "string"}, {"name": "label", "sequence": "string"}], "splits": [{"name": "train", "num_bytes": 18091936016.3, "num_examples": 194700}], "download_size": 21810993407, "dataset_size": 18091936016.3}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-19T01:53:16+00:00
[]
[]
TAGS #region-us
# Dataset Card for "omni3d_v2" More Information needed
[ "# Dataset Card for \"omni3d_v2\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"omni3d_v2\"\n\nMore Information needed" ]
[ 6, 17 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"omni3d_v2\"\n\nMore Information needed" ]
e59642be2745fe01e4e9262ca5755ad699a8557d
# Dataset Card for "emotions" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Villian7/Emotions_Data
[ "license:apache-2.0", "doi:10.57967/hf/1000", "region:us" ]
2023-08-18T13:53:57+00:00
{"license": "apache-2.0", "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "text", "dtype": "string"}, {"name": "label", "dtype": "int64"}, {"name": "label_text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 109428773, "num_examples": 1096869}, {"name": "validation", "num_bytes": 13025428, "num_examples": 133105}, {"name": "test", "num_bytes": 13047201, "num_examples": 133104}], "download_size": 77478115, "dataset_size": 135501402}}
2023-08-18T14:16:29+00:00
[]
[]
TAGS #license-apache-2.0 #doi-10.57967/hf/1000 #region-us
# Dataset Card for "emotions" More Information needed
[ "# Dataset Card for \"emotions\"\n\nMore Information needed" ]
[ "TAGS\n#license-apache-2.0 #doi-10.57967/hf/1000 #region-us \n", "# Dataset Card for \"emotions\"\n\nMore Information needed" ]
[ 26, 12 ]
[ "passage: TAGS\n#license-apache-2.0 #doi-10.57967/hf/1000 #region-us \n# Dataset Card for \"emotions\"\n\nMore Information needed" ]
69f66c3972ca092dc50e21649527b13e6bceeb98
# Dataset Card for "donutdataset" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Amani123/donutdataset
[ "region:us" ]
2023-08-18T13:54:22+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "ground_truth", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 77291761.0, "num_examples": 96}], "download_size": 76288174, "dataset_size": 77291761.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-18T14:12:10+00:00
[]
[]
TAGS #region-us
# Dataset Card for "donutdataset" More Information needed
[ "# Dataset Card for \"donutdataset\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"donutdataset\"\n\nMore Information needed" ]
[ 6, 14 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"donutdataset\"\n\nMore Information needed" ]
000279dfc04a30bb795d435931d33cbe4a509a6a
# FakeNewsSet ## Dataset Description - **Homepage:** - **Repository:** [https://dl.acm.org/doi/abs/10.1145/3428658.3430965](https://dl.acm.org/doi/abs/10.1145/3428658.3430965) - **Paper:** [https://dl.acm.org/doi/abs/10.1145/3428658.3430965](https://dl.acm.org/doi/abs/10.1145/3428658.3430965) - **Leaderboard:** - **Point of Contact:** ### Dataset Summary ### Supported Tasks and Leaderboards [More Information Needed] ### Languages The dataset is in Portuguese. ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information If you use "FakeNewsSet", please cite: ```bibtex @inproceedings{10.1145/3428658.3430965, author = {da Silva, Fl\'{a}vio Roberto Matias and Freire, Paulo M\'{a}rcio Souza and de Souza, Marcelo Pereira and de A. B. Plenamente, Gustavo and Goldschmidt, Ronaldo Ribeiro}, title = {FakeNewsSetGen: A Process to Build Datasets That Support Comparison Among Fake News Detection Methods}, year = {2020}, isbn = {9781450381963}, publisher = {Association for Computing Machinery}, address = {New York, NY, USA}, url = {https://doi.org/10.1145/3428658.3430965}, doi = {10.1145/3428658.3430965}, abstract = {Due to easy access and low cost, social media online news consumption has increased significantly for the last decade. Despite their benefits, some social media allow anyone to post news with intense spreading power, which amplifies an old problem: the dissemination of Fake News. In the face of this scenario, several machine learning-based methods to automatically detect Fake News (MLFN) have been proposed. All of them require datasets to train and evaluate their detection models. Although recent MLFN were designed to consider data regarding the news propagation on social media, most of the few available datasets do not contain this kind of data. Hence, comparing the performances amid those recent MLFN and the others is restricted to a very limited number of datasets. Moreover, all existing datasets with propagation data do not contain news in Portuguese, which impairs the evaluation of the MLFN in this language. Thus, this work proposes FakeNewsSetGen, a process that builds Fake News datasets that contain news propagation data and support comparison amid the state-of-the-art MLFN. FakeNewsSetGen's software engineering process was guided to include all kind of data required by the existing MLFN. In order to illustrate FakeNewsSetGen's viability and adequacy, a case study was carried out. It encompassed the implementation of a FakeNewsSetGen prototype and the application of this prototype to create a dataset called FakeNewsSet, with news in Portuguese. 
Five MLFN with different kind of data requirements (two of them demanding news propagation data) were applied to FakeNewsSet and compared, demonstrating the potential use of both the proposed process and the created dataset.}, booktitle = {Proceedings of the Brazilian Symposium on Multimedia and the Web}, pages = {241–248}, numpages = {8}, keywords = {Fake News detection, Dataset building process, social media}, location = {S\~{a}o Lu\'{\i}s, Brazil}, series = {WebMedia '20} } ``` ### Contributions Thanks to [@ju-resplande](https://github.com/ju-resplande) for adding this dataset.
fake-news-UFG/FakeNewsSet
[ "task_categories:text-classification", "language_creators:found", "multilinguality:monolingual", "size_categories:n<1K", "language:pt", "license:mit", "region:us" ]
2023-08-18T13:54:33+00:00
{"language_creators": ["found"], "language": ["pt"], "license": "mit", "multilinguality": ["monolingual"], "size_categories": ["n<1K"], "task_categories": ["text-classification"], "language_details": "pt-BR"}
2023-08-18T16:36:21+00:00
[]
[ "pt" ]
TAGS #task_categories-text-classification #language_creators-found #multilinguality-monolingual #size_categories-n<1K #language-Portuguese #license-mit #region-us
# FakeNewsSet ## Dataset Description - Homepage: - Repository: URL - Paper: URL - Leaderboard: - Point of Contact: ### Dataset Summary ### Supported Tasks and Leaderboards ### Languages The dataset is in Portuguese. ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information If you use "FakeNewsSet", please cite: ### Contributions Thanks to @ju-resplande for adding this dataset.
[ "# FakeNewsSet", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: URL\n- Leaderboard: \n- Point of Contact:", "### Dataset Summary", "### Supported Tasks and Leaderboards", "### Languages\n\nThe dataset is in Portuguese.", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information\n\n\n\n\n\nIf you use \"FakeNewsSet\", please cite:", "### Contributions\n\nThanks to @ju-resplande for adding this dataset." ]
[ "TAGS\n#task_categories-text-classification #language_creators-found #multilinguality-monolingual #size_categories-n<1K #language-Portuguese #license-mit #region-us \n", "# FakeNewsSet", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: URL\n- Leaderboard: \n- Point of Contact:", "### Dataset Summary", "### Supported Tasks and Leaderboards", "### Languages\n\nThe dataset is in Portuguese.", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information\n\n\n\n\n\nIf you use \"FakeNewsSet\", please cite:", "### Contributions\n\nThanks to @ju-resplande for adding this dataset." ]
[ 54, 6, 26, 6, 10, 13, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 19, 19 ]
[ "passage: TAGS\n#task_categories-text-classification #language_creators-found #multilinguality-monolingual #size_categories-n<1K #language-Portuguese #license-mit #region-us \n# FakeNewsSet## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: URL\n- Leaderboard: \n- Point of Contact:### Dataset Summary### Supported Tasks and Leaderboards### Languages\n\nThe dataset is in Portuguese.## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information\n\n\n\n\n\nIf you use \"FakeNewsSet\", please cite:### Contributions\n\nThanks to @ju-resplande for adding this dataset." ]
cb58d7f7b31691ce73ff483da26b3a7df8091fba
# Dataset Card for "BGL_GPT2_Baseline" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
EgilKarlsen/BGL_GPT2_Baseline
[ "region:us" ]
2023-08-18T13:55:35+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "0", "dtype": "float32"}, {"name": "1", "dtype": "float32"}, {"name": "2", "dtype": "float32"}, {"name": "3", "dtype": "float32"}, {"name": "4", "dtype": "float32"}, {"name": "5", "dtype": "float32"}, {"name": "6", "dtype": "float32"}, {"name": "7", "dtype": "float32"}, {"name": "8", "dtype": "float32"}, {"name": "9", "dtype": "float32"}, {"name": "10", "dtype": "float32"}, {"name": "11", "dtype": "float32"}, {"name": "12", "dtype": "float32"}, {"name": "13", "dtype": "float32"}, {"name": "14", "dtype": "float32"}, {"name": "15", "dtype": "float32"}, {"name": "16", "dtype": "float32"}, {"name": "17", "dtype": "float32"}, {"name": "18", "dtype": "float32"}, {"name": "19", "dtype": "float32"}, {"name": "20", "dtype": "float32"}, {"name": "21", "dtype": "float32"}, {"name": "22", "dtype": "float32"}, {"name": "23", "dtype": "float32"}, {"name": "24", "dtype": "float32"}, {"name": "25", "dtype": "float32"}, {"name": "26", "dtype": "float32"}, {"name": "27", "dtype": "float32"}, {"name": "28", "dtype": "float32"}, {"name": "29", "dtype": "float32"}, {"name": "30", "dtype": "float32"}, {"name": "31", "dtype": "float32"}, {"name": "32", "dtype": "float32"}, {"name": "33", "dtype": "float32"}, {"name": "34", "dtype": "float32"}, {"name": "35", "dtype": "float32"}, {"name": "36", "dtype": "float32"}, {"name": "37", "dtype": "float32"}, {"name": "38", "dtype": "float32"}, {"name": "39", "dtype": "float32"}, {"name": "40", "dtype": "float32"}, {"name": "41", "dtype": "float32"}, {"name": "42", "dtype": "float32"}, {"name": "43", "dtype": "float32"}, {"name": "44", "dtype": "float32"}, {"name": "45", "dtype": "float32"}, {"name": "46", "dtype": "float32"}, {"name": "47", "dtype": "float32"}, {"name": "48", "dtype": "float32"}, {"name": "49", "dtype": "float32"}, {"name": "50", "dtype": "float32"}, {"name": "51", "dtype": "float32"}, {"name": "52", "dtype": "float32"}, {"name": "53", "dtype": "float32"}, {"name": "54", "dtype": "float32"}, {"name": "55", "dtype": "float32"}, {"name": "56", "dtype": "float32"}, {"name": "57", "dtype": "float32"}, {"name": "58", "dtype": "float32"}, {"name": "59", "dtype": "float32"}, {"name": "60", "dtype": "float32"}, {"name": "61", "dtype": "float32"}, {"name": "62", "dtype": "float32"}, {"name": "63", "dtype": "float32"}, {"name": "64", "dtype": "float32"}, {"name": "65", "dtype": "float32"}, {"name": "66", "dtype": "float32"}, {"name": "67", "dtype": "float32"}, {"name": "68", "dtype": "float32"}, {"name": "69", "dtype": "float32"}, {"name": "70", "dtype": "float32"}, {"name": "71", "dtype": "float32"}, {"name": "72", "dtype": "float32"}, {"name": "73", "dtype": "float32"}, {"name": "74", "dtype": "float32"}, {"name": "75", "dtype": "float32"}, {"name": "76", "dtype": "float32"}, {"name": "77", "dtype": "float32"}, {"name": "78", "dtype": "float32"}, {"name": "79", "dtype": "float32"}, {"name": "80", "dtype": "float32"}, {"name": "81", "dtype": "float32"}, {"name": "82", "dtype": "float32"}, {"name": "83", "dtype": "float32"}, {"name": "84", "dtype": "float32"}, {"name": "85", "dtype": "float32"}, {"name": "86", "dtype": "float32"}, {"name": "87", "dtype": "float32"}, {"name": "88", "dtype": "float32"}, {"name": "89", "dtype": "float32"}, {"name": "90", "dtype": "float32"}, {"name": "91", "dtype": "float32"}, {"name": "92", "dtype": "float32"}, {"name": "93", "dtype": "float32"}, 
{"name": "94", "dtype": "float32"}, {"name": "95", "dtype": "float32"}, {"name": "96", "dtype": "float32"}, {"name": "97", "dtype": "float32"}, {"name": "98", "dtype": "float32"}, {"name": "99", "dtype": "float32"}, {"name": "100", "dtype": "float32"}, {"name": "101", "dtype": "float32"}, {"name": "102", "dtype": "float32"}, {"name": "103", "dtype": "float32"}, {"name": "104", "dtype": "float32"}, {"name": "105", "dtype": "float32"}, {"name": "106", "dtype": "float32"}, {"name": "107", "dtype": "float32"}, {"name": "108", "dtype": "float32"}, {"name": "109", "dtype": "float32"}, {"name": "110", "dtype": "float32"}, {"name": "111", "dtype": "float32"}, {"name": "112", "dtype": "float32"}, {"name": "113", "dtype": "float32"}, {"name": "114", "dtype": "float32"}, {"name": "115", "dtype": "float32"}, {"name": "116", "dtype": "float32"}, {"name": "117", "dtype": "float32"}, {"name": "118", "dtype": "float32"}, {"name": "119", "dtype": "float32"}, {"name": "120", "dtype": "float32"}, {"name": "121", "dtype": "float32"}, {"name": "122", "dtype": "float32"}, {"name": "123", "dtype": "float32"}, {"name": "124", "dtype": "float32"}, {"name": "125", "dtype": "float32"}, {"name": "126", "dtype": "float32"}, {"name": "127", "dtype": "float32"}, {"name": "128", "dtype": "float32"}, {"name": "129", "dtype": "float32"}, {"name": "130", "dtype": "float32"}, {"name": "131", "dtype": "float32"}, {"name": "132", "dtype": "float32"}, {"name": "133", "dtype": "float32"}, {"name": "134", "dtype": "float32"}, {"name": "135", "dtype": "float32"}, {"name": "136", "dtype": "float32"}, {"name": "137", "dtype": "float32"}, {"name": "138", "dtype": "float32"}, {"name": "139", "dtype": "float32"}, {"name": "140", "dtype": "float32"}, {"name": "141", "dtype": "float32"}, {"name": "142", "dtype": "float32"}, {"name": "143", "dtype": "float32"}, {"name": "144", "dtype": "float32"}, {"name": "145", "dtype": "float32"}, {"name": "146", "dtype": "float32"}, {"name": "147", "dtype": "float32"}, {"name": "148", "dtype": "float32"}, {"name": "149", "dtype": "float32"}, {"name": "150", "dtype": "float32"}, {"name": "151", "dtype": "float32"}, {"name": "152", "dtype": "float32"}, {"name": "153", "dtype": "float32"}, {"name": "154", "dtype": "float32"}, {"name": "155", "dtype": "float32"}, {"name": "156", "dtype": "float32"}, {"name": "157", "dtype": "float32"}, {"name": "158", "dtype": "float32"}, {"name": "159", "dtype": "float32"}, {"name": "160", "dtype": "float32"}, {"name": "161", "dtype": "float32"}, {"name": "162", "dtype": "float32"}, {"name": "163", "dtype": "float32"}, {"name": "164", "dtype": "float32"}, {"name": "165", "dtype": "float32"}, {"name": "166", "dtype": "float32"}, {"name": "167", "dtype": "float32"}, {"name": "168", "dtype": "float32"}, {"name": "169", "dtype": "float32"}, {"name": "170", "dtype": "float32"}, {"name": "171", "dtype": "float32"}, {"name": "172", "dtype": "float32"}, {"name": "173", "dtype": "float32"}, {"name": "174", "dtype": "float32"}, {"name": "175", "dtype": "float32"}, {"name": "176", "dtype": "float32"}, {"name": "177", "dtype": "float32"}, {"name": "178", "dtype": "float32"}, {"name": "179", "dtype": "float32"}, {"name": "180", "dtype": "float32"}, {"name": "181", "dtype": "float32"}, {"name": "182", "dtype": "float32"}, {"name": "183", "dtype": "float32"}, {"name": "184", "dtype": "float32"}, {"name": "185", "dtype": "float32"}, {"name": "186", "dtype": "float32"}, {"name": "187", "dtype": "float32"}, {"name": "188", "dtype": "float32"}, {"name": "189", "dtype": "float32"}, {"name": 
"190", "dtype": "float32"}, {"name": "191", "dtype": "float32"}, {"name": "192", "dtype": "float32"}, {"name": "193", "dtype": "float32"}, {"name": "194", "dtype": "float32"}, {"name": "195", "dtype": "float32"}, {"name": "196", "dtype": "float32"}, {"name": "197", "dtype": "float32"}, {"name": "198", "dtype": "float32"}, {"name": "199", "dtype": "float32"}, {"name": "200", "dtype": "float32"}, {"name": "201", "dtype": "float32"}, {"name": "202", "dtype": "float32"}, {"name": "203", "dtype": "float32"}, {"name": "204", "dtype": "float32"}, {"name": "205", "dtype": "float32"}, {"name": "206", "dtype": "float32"}, {"name": "207", "dtype": "float32"}, {"name": "208", "dtype": "float32"}, {"name": "209", "dtype": "float32"}, {"name": "210", "dtype": "float32"}, {"name": "211", "dtype": "float32"}, {"name": "212", "dtype": "float32"}, {"name": "213", "dtype": "float32"}, {"name": "214", "dtype": "float32"}, {"name": "215", "dtype": "float32"}, {"name": "216", "dtype": "float32"}, {"name": "217", "dtype": "float32"}, {"name": "218", "dtype": "float32"}, {"name": "219", "dtype": "float32"}, {"name": "220", "dtype": "float32"}, {"name": "221", "dtype": "float32"}, {"name": "222", "dtype": "float32"}, {"name": "223", "dtype": "float32"}, {"name": "224", "dtype": "float32"}, {"name": "225", "dtype": "float32"}, {"name": "226", "dtype": "float32"}, {"name": "227", "dtype": "float32"}, {"name": "228", "dtype": "float32"}, {"name": "229", "dtype": "float32"}, {"name": "230", "dtype": "float32"}, {"name": "231", "dtype": "float32"}, {"name": "232", "dtype": "float32"}, {"name": "233", "dtype": "float32"}, {"name": "234", "dtype": "float32"}, {"name": "235", "dtype": "float32"}, {"name": "236", "dtype": "float32"}, {"name": "237", "dtype": "float32"}, {"name": "238", "dtype": "float32"}, {"name": "239", "dtype": "float32"}, {"name": "240", "dtype": "float32"}, {"name": "241", "dtype": "float32"}, {"name": "242", "dtype": "float32"}, {"name": "243", "dtype": "float32"}, {"name": "244", "dtype": "float32"}, {"name": "245", "dtype": "float32"}, {"name": "246", "dtype": "float32"}, {"name": "247", "dtype": "float32"}, {"name": "248", "dtype": "float32"}, {"name": "249", "dtype": "float32"}, {"name": "250", "dtype": "float32"}, {"name": "251", "dtype": "float32"}, {"name": "252", "dtype": "float32"}, {"name": "253", "dtype": "float32"}, {"name": "254", "dtype": "float32"}, {"name": "255", "dtype": "float32"}, {"name": "256", "dtype": "float32"}, {"name": "257", "dtype": "float32"}, {"name": "258", "dtype": "float32"}, {"name": "259", "dtype": "float32"}, {"name": "260", "dtype": "float32"}, {"name": "261", "dtype": "float32"}, {"name": "262", "dtype": "float32"}, {"name": "263", "dtype": "float32"}, {"name": "264", "dtype": "float32"}, {"name": "265", "dtype": "float32"}, {"name": "266", "dtype": "float32"}, {"name": "267", "dtype": "float32"}, {"name": "268", "dtype": "float32"}, {"name": "269", "dtype": "float32"}, {"name": "270", "dtype": "float32"}, {"name": "271", "dtype": "float32"}, {"name": "272", "dtype": "float32"}, {"name": "273", "dtype": "float32"}, {"name": "274", "dtype": "float32"}, {"name": "275", "dtype": "float32"}, {"name": "276", "dtype": "float32"}, {"name": "277", "dtype": "float32"}, {"name": "278", "dtype": "float32"}, {"name": "279", "dtype": "float32"}, {"name": "280", "dtype": "float32"}, {"name": "281", "dtype": "float32"}, {"name": "282", "dtype": "float32"}, {"name": "283", "dtype": "float32"}, {"name": "284", "dtype": "float32"}, {"name": "285", "dtype": "float32"}, {"name": 
"286", "dtype": "float32"}, {"name": "287", "dtype": "float32"}, {"name": "288", "dtype": "float32"}, {"name": "289", "dtype": "float32"}, {"name": "290", "dtype": "float32"}, {"name": "291", "dtype": "float32"}, {"name": "292", "dtype": "float32"}, {"name": "293", "dtype": "float32"}, {"name": "294", "dtype": "float32"}, {"name": "295", "dtype": "float32"}, {"name": "296", "dtype": "float32"}, {"name": "297", "dtype": "float32"}, {"name": "298", "dtype": "float32"}, {"name": "299", "dtype": "float32"}, {"name": "300", "dtype": "float32"}, {"name": "301", "dtype": "float32"}, {"name": "302", "dtype": "float32"}, {"name": "303", "dtype": "float32"}, {"name": "304", "dtype": "float32"}, {"name": "305", "dtype": "float32"}, {"name": "306", "dtype": "float32"}, {"name": "307", "dtype": "float32"}, {"name": "308", "dtype": "float32"}, {"name": "309", "dtype": "float32"}, {"name": "310", "dtype": "float32"}, {"name": "311", "dtype": "float32"}, {"name": "312", "dtype": "float32"}, {"name": "313", "dtype": "float32"}, {"name": "314", "dtype": "float32"}, {"name": "315", "dtype": "float32"}, {"name": "316", "dtype": "float32"}, {"name": "317", "dtype": "float32"}, {"name": "318", "dtype": "float32"}, {"name": "319", "dtype": "float32"}, {"name": "320", "dtype": "float32"}, {"name": "321", "dtype": "float32"}, {"name": "322", "dtype": "float32"}, {"name": "323", "dtype": "float32"}, {"name": "324", "dtype": "float32"}, {"name": "325", "dtype": "float32"}, {"name": "326", "dtype": "float32"}, {"name": "327", "dtype": "float32"}, {"name": "328", "dtype": "float32"}, {"name": "329", "dtype": "float32"}, {"name": "330", "dtype": "float32"}, {"name": "331", "dtype": "float32"}, {"name": "332", "dtype": "float32"}, {"name": "333", "dtype": "float32"}, {"name": "334", "dtype": "float32"}, {"name": "335", "dtype": "float32"}, {"name": "336", "dtype": "float32"}, {"name": "337", "dtype": "float32"}, {"name": "338", "dtype": "float32"}, {"name": "339", "dtype": "float32"}, {"name": "340", "dtype": "float32"}, {"name": "341", "dtype": "float32"}, {"name": "342", "dtype": "float32"}, {"name": "343", "dtype": "float32"}, {"name": "344", "dtype": "float32"}, {"name": "345", "dtype": "float32"}, {"name": "346", "dtype": "float32"}, {"name": "347", "dtype": "float32"}, {"name": "348", "dtype": "float32"}, {"name": "349", "dtype": "float32"}, {"name": "350", "dtype": "float32"}, {"name": "351", "dtype": "float32"}, {"name": "352", "dtype": "float32"}, {"name": "353", "dtype": "float32"}, {"name": "354", "dtype": "float32"}, {"name": "355", "dtype": "float32"}, {"name": "356", "dtype": "float32"}, {"name": "357", "dtype": "float32"}, {"name": "358", "dtype": "float32"}, {"name": "359", "dtype": "float32"}, {"name": "360", "dtype": "float32"}, {"name": "361", "dtype": "float32"}, {"name": "362", "dtype": "float32"}, {"name": "363", "dtype": "float32"}, {"name": "364", "dtype": "float32"}, {"name": "365", "dtype": "float32"}, {"name": "366", "dtype": "float32"}, {"name": "367", "dtype": "float32"}, {"name": "368", "dtype": "float32"}, {"name": "369", "dtype": "float32"}, {"name": "370", "dtype": "float32"}, {"name": "371", "dtype": "float32"}, {"name": "372", "dtype": "float32"}, {"name": "373", "dtype": "float32"}, {"name": "374", "dtype": "float32"}, {"name": "375", "dtype": "float32"}, {"name": "376", "dtype": "float32"}, {"name": "377", "dtype": "float32"}, {"name": "378", "dtype": "float32"}, {"name": "379", "dtype": "float32"}, {"name": "380", "dtype": "float32"}, {"name": "381", "dtype": "float32"}, {"name": 
"382", "dtype": "float32"}, {"name": "383", "dtype": "float32"}, {"name": "384", "dtype": "float32"}, {"name": "385", "dtype": "float32"}, {"name": "386", "dtype": "float32"}, {"name": "387", "dtype": "float32"}, {"name": "388", "dtype": "float32"}, {"name": "389", "dtype": "float32"}, {"name": "390", "dtype": "float32"}, {"name": "391", "dtype": "float32"}, {"name": "392", "dtype": "float32"}, {"name": "393", "dtype": "float32"}, {"name": "394", "dtype": "float32"}, {"name": "395", "dtype": "float32"}, {"name": "396", "dtype": "float32"}, {"name": "397", "dtype": "float32"}, {"name": "398", "dtype": "float32"}, {"name": "399", "dtype": "float32"}, {"name": "400", "dtype": "float32"}, {"name": "401", "dtype": "float32"}, {"name": "402", "dtype": "float32"}, {"name": "403", "dtype": "float32"}, {"name": "404", "dtype": "float32"}, {"name": "405", "dtype": "float32"}, {"name": "406", "dtype": "float32"}, {"name": "407", "dtype": "float32"}, {"name": "408", "dtype": "float32"}, {"name": "409", "dtype": "float32"}, {"name": "410", "dtype": "float32"}, {"name": "411", "dtype": "float32"}, {"name": "412", "dtype": "float32"}, {"name": "413", "dtype": "float32"}, {"name": "414", "dtype": "float32"}, {"name": "415", "dtype": "float32"}, {"name": "416", "dtype": "float32"}, {"name": "417", "dtype": "float32"}, {"name": "418", "dtype": "float32"}, {"name": "419", "dtype": "float32"}, {"name": "420", "dtype": "float32"}, {"name": "421", "dtype": "float32"}, {"name": "422", "dtype": "float32"}, {"name": "423", "dtype": "float32"}, {"name": "424", "dtype": "float32"}, {"name": "425", "dtype": "float32"}, {"name": "426", "dtype": "float32"}, {"name": "427", "dtype": "float32"}, {"name": "428", "dtype": "float32"}, {"name": "429", "dtype": "float32"}, {"name": "430", "dtype": "float32"}, {"name": "431", "dtype": "float32"}, {"name": "432", "dtype": "float32"}, {"name": "433", "dtype": "float32"}, {"name": "434", "dtype": "float32"}, {"name": "435", "dtype": "float32"}, {"name": "436", "dtype": "float32"}, {"name": "437", "dtype": "float32"}, {"name": "438", "dtype": "float32"}, {"name": "439", "dtype": "float32"}, {"name": "440", "dtype": "float32"}, {"name": "441", "dtype": "float32"}, {"name": "442", "dtype": "float32"}, {"name": "443", "dtype": "float32"}, {"name": "444", "dtype": "float32"}, {"name": "445", "dtype": "float32"}, {"name": "446", "dtype": "float32"}, {"name": "447", "dtype": "float32"}, {"name": "448", "dtype": "float32"}, {"name": "449", "dtype": "float32"}, {"name": "450", "dtype": "float32"}, {"name": "451", "dtype": "float32"}, {"name": "452", "dtype": "float32"}, {"name": "453", "dtype": "float32"}, {"name": "454", "dtype": "float32"}, {"name": "455", "dtype": "float32"}, {"name": "456", "dtype": "float32"}, {"name": "457", "dtype": "float32"}, {"name": "458", "dtype": "float32"}, {"name": "459", "dtype": "float32"}, {"name": "460", "dtype": "float32"}, {"name": "461", "dtype": "float32"}, {"name": "462", "dtype": "float32"}, {"name": "463", "dtype": "float32"}, {"name": "464", "dtype": "float32"}, {"name": "465", "dtype": "float32"}, {"name": "466", "dtype": "float32"}, {"name": "467", "dtype": "float32"}, {"name": "468", "dtype": "float32"}, {"name": "469", "dtype": "float32"}, {"name": "470", "dtype": "float32"}, {"name": "471", "dtype": "float32"}, {"name": "472", "dtype": "float32"}, {"name": "473", "dtype": "float32"}, {"name": "474", "dtype": "float32"}, {"name": "475", "dtype": "float32"}, {"name": "476", "dtype": "float32"}, {"name": "477", "dtype": "float32"}, {"name": 
"478", "dtype": "float32"}, {"name": "479", "dtype": "float32"}, {"name": "480", "dtype": "float32"}, {"name": "481", "dtype": "float32"}, {"name": "482", "dtype": "float32"}, {"name": "483", "dtype": "float32"}, {"name": "484", "dtype": "float32"}, {"name": "485", "dtype": "float32"}, {"name": "486", "dtype": "float32"}, {"name": "487", "dtype": "float32"}, {"name": "488", "dtype": "float32"}, {"name": "489", "dtype": "float32"}, {"name": "490", "dtype": "float32"}, {"name": "491", "dtype": "float32"}, {"name": "492", "dtype": "float32"}, {"name": "493", "dtype": "float32"}, {"name": "494", "dtype": "float32"}, {"name": "495", "dtype": "float32"}, {"name": "496", "dtype": "float32"}, {"name": "497", "dtype": "float32"}, {"name": "498", "dtype": "float32"}, {"name": "499", "dtype": "float32"}, {"name": "500", "dtype": "float32"}, {"name": "501", "dtype": "float32"}, {"name": "502", "dtype": "float32"}, {"name": "503", "dtype": "float32"}, {"name": "504", "dtype": "float32"}, {"name": "505", "dtype": "float32"}, {"name": "506", "dtype": "float32"}, {"name": "507", "dtype": "float32"}, {"name": "508", "dtype": "float32"}, {"name": "509", "dtype": "float32"}, {"name": "510", "dtype": "float32"}, {"name": "511", "dtype": "float32"}, {"name": "512", "dtype": "float32"}, {"name": "513", "dtype": "float32"}, {"name": "514", "dtype": "float32"}, {"name": "515", "dtype": "float32"}, {"name": "516", "dtype": "float32"}, {"name": "517", "dtype": "float32"}, {"name": "518", "dtype": "float32"}, {"name": "519", "dtype": "float32"}, {"name": "520", "dtype": "float32"}, {"name": "521", "dtype": "float32"}, {"name": "522", "dtype": "float32"}, {"name": "523", "dtype": "float32"}, {"name": "524", "dtype": "float32"}, {"name": "525", "dtype": "float32"}, {"name": "526", "dtype": "float32"}, {"name": "527", "dtype": "float32"}, {"name": "528", "dtype": "float32"}, {"name": "529", "dtype": "float32"}, {"name": "530", "dtype": "float32"}, {"name": "531", "dtype": "float32"}, {"name": "532", "dtype": "float32"}, {"name": "533", "dtype": "float32"}, {"name": "534", "dtype": "float32"}, {"name": "535", "dtype": "float32"}, {"name": "536", "dtype": "float32"}, {"name": "537", "dtype": "float32"}, {"name": "538", "dtype": "float32"}, {"name": "539", "dtype": "float32"}, {"name": "540", "dtype": "float32"}, {"name": "541", "dtype": "float32"}, {"name": "542", "dtype": "float32"}, {"name": "543", "dtype": "float32"}, {"name": "544", "dtype": "float32"}, {"name": "545", "dtype": "float32"}, {"name": "546", "dtype": "float32"}, {"name": "547", "dtype": "float32"}, {"name": "548", "dtype": "float32"}, {"name": "549", "dtype": "float32"}, {"name": "550", "dtype": "float32"}, {"name": "551", "dtype": "float32"}, {"name": "552", "dtype": "float32"}, {"name": "553", "dtype": "float32"}, {"name": "554", "dtype": "float32"}, {"name": "555", "dtype": "float32"}, {"name": "556", "dtype": "float32"}, {"name": "557", "dtype": "float32"}, {"name": "558", "dtype": "float32"}, {"name": "559", "dtype": "float32"}, {"name": "560", "dtype": "float32"}, {"name": "561", "dtype": "float32"}, {"name": "562", "dtype": "float32"}, {"name": "563", "dtype": "float32"}, {"name": "564", "dtype": "float32"}, {"name": "565", "dtype": "float32"}, {"name": "566", "dtype": "float32"}, {"name": "567", "dtype": "float32"}, {"name": "568", "dtype": "float32"}, {"name": "569", "dtype": "float32"}, {"name": "570", "dtype": "float32"}, {"name": "571", "dtype": "float32"}, {"name": "572", "dtype": "float32"}, {"name": "573", "dtype": "float32"}, {"name": 
"574", "dtype": "float32"}, {"name": "575", "dtype": "float32"}, {"name": "576", "dtype": "float32"}, {"name": "577", "dtype": "float32"}, {"name": "578", "dtype": "float32"}, {"name": "579", "dtype": "float32"}, {"name": "580", "dtype": "float32"}, {"name": "581", "dtype": "float32"}, {"name": "582", "dtype": "float32"}, {"name": "583", "dtype": "float32"}, {"name": "584", "dtype": "float32"}, {"name": "585", "dtype": "float32"}, {"name": "586", "dtype": "float32"}, {"name": "587", "dtype": "float32"}, {"name": "588", "dtype": "float32"}, {"name": "589", "dtype": "float32"}, {"name": "590", "dtype": "float32"}, {"name": "591", "dtype": "float32"}, {"name": "592", "dtype": "float32"}, {"name": "593", "dtype": "float32"}, {"name": "594", "dtype": "float32"}, {"name": "595", "dtype": "float32"}, {"name": "596", "dtype": "float32"}, {"name": "597", "dtype": "float32"}, {"name": "598", "dtype": "float32"}, {"name": "599", "dtype": "float32"}, {"name": "600", "dtype": "float32"}, {"name": "601", "dtype": "float32"}, {"name": "602", "dtype": "float32"}, {"name": "603", "dtype": "float32"}, {"name": "604", "dtype": "float32"}, {"name": "605", "dtype": "float32"}, {"name": "606", "dtype": "float32"}, {"name": "607", "dtype": "float32"}, {"name": "608", "dtype": "float32"}, {"name": "609", "dtype": "float32"}, {"name": "610", "dtype": "float32"}, {"name": "611", "dtype": "float32"}, {"name": "612", "dtype": "float32"}, {"name": "613", "dtype": "float32"}, {"name": "614", "dtype": "float32"}, {"name": "615", "dtype": "float32"}, {"name": "616", "dtype": "float32"}, {"name": "617", "dtype": "float32"}, {"name": "618", "dtype": "float32"}, {"name": "619", "dtype": "float32"}, {"name": "620", "dtype": "float32"}, {"name": "621", "dtype": "float32"}, {"name": "622", "dtype": "float32"}, {"name": "623", "dtype": "float32"}, {"name": "624", "dtype": "float32"}, {"name": "625", "dtype": "float32"}, {"name": "626", "dtype": "float32"}, {"name": "627", "dtype": "float32"}, {"name": "628", "dtype": "float32"}, {"name": "629", "dtype": "float32"}, {"name": "630", "dtype": "float32"}, {"name": "631", "dtype": "float32"}, {"name": "632", "dtype": "float32"}, {"name": "633", "dtype": "float32"}, {"name": "634", "dtype": "float32"}, {"name": "635", "dtype": "float32"}, {"name": "636", "dtype": "float32"}, {"name": "637", "dtype": "float32"}, {"name": "638", "dtype": "float32"}, {"name": "639", "dtype": "float32"}, {"name": "640", "dtype": "float32"}, {"name": "641", "dtype": "float32"}, {"name": "642", "dtype": "float32"}, {"name": "643", "dtype": "float32"}, {"name": "644", "dtype": "float32"}, {"name": "645", "dtype": "float32"}, {"name": "646", "dtype": "float32"}, {"name": "647", "dtype": "float32"}, {"name": "648", "dtype": "float32"}, {"name": "649", "dtype": "float32"}, {"name": "650", "dtype": "float32"}, {"name": "651", "dtype": "float32"}, {"name": "652", "dtype": "float32"}, {"name": "653", "dtype": "float32"}, {"name": "654", "dtype": "float32"}, {"name": "655", "dtype": "float32"}, {"name": "656", "dtype": "float32"}, {"name": "657", "dtype": "float32"}, {"name": "658", "dtype": "float32"}, {"name": "659", "dtype": "float32"}, {"name": "660", "dtype": "float32"}, {"name": "661", "dtype": "float32"}, {"name": "662", "dtype": "float32"}, {"name": "663", "dtype": "float32"}, {"name": "664", "dtype": "float32"}, {"name": "665", "dtype": "float32"}, {"name": "666", "dtype": "float32"}, {"name": "667", "dtype": "float32"}, {"name": "668", "dtype": "float32"}, {"name": "669", "dtype": "float32"}, {"name": 
"670", "dtype": "float32"}, {"name": "671", "dtype": "float32"}, {"name": "672", "dtype": "float32"}, {"name": "673", "dtype": "float32"}, {"name": "674", "dtype": "float32"}, {"name": "675", "dtype": "float32"}, {"name": "676", "dtype": "float32"}, {"name": "677", "dtype": "float32"}, {"name": "678", "dtype": "float32"}, {"name": "679", "dtype": "float32"}, {"name": "680", "dtype": "float32"}, {"name": "681", "dtype": "float32"}, {"name": "682", "dtype": "float32"}, {"name": "683", "dtype": "float32"}, {"name": "684", "dtype": "float32"}, {"name": "685", "dtype": "float32"}, {"name": "686", "dtype": "float32"}, {"name": "687", "dtype": "float32"}, {"name": "688", "dtype": "float32"}, {"name": "689", "dtype": "float32"}, {"name": "690", "dtype": "float32"}, {"name": "691", "dtype": "float32"}, {"name": "692", "dtype": "float32"}, {"name": "693", "dtype": "float32"}, {"name": "694", "dtype": "float32"}, {"name": "695", "dtype": "float32"}, {"name": "696", "dtype": "float32"}, {"name": "697", "dtype": "float32"}, {"name": "698", "dtype": "float32"}, {"name": "699", "dtype": "float32"}, {"name": "700", "dtype": "float32"}, {"name": "701", "dtype": "float32"}, {"name": "702", "dtype": "float32"}, {"name": "703", "dtype": "float32"}, {"name": "704", "dtype": "float32"}, {"name": "705", "dtype": "float32"}, {"name": "706", "dtype": "float32"}, {"name": "707", "dtype": "float32"}, {"name": "708", "dtype": "float32"}, {"name": "709", "dtype": "float32"}, {"name": "710", "dtype": "float32"}, {"name": "711", "dtype": "float32"}, {"name": "712", "dtype": "float32"}, {"name": "713", "dtype": "float32"}, {"name": "714", "dtype": "float32"}, {"name": "715", "dtype": "float32"}, {"name": "716", "dtype": "float32"}, {"name": "717", "dtype": "float32"}, {"name": "718", "dtype": "float32"}, {"name": "719", "dtype": "float32"}, {"name": "720", "dtype": "float32"}, {"name": "721", "dtype": "float32"}, {"name": "722", "dtype": "float32"}, {"name": "723", "dtype": "float32"}, {"name": "724", "dtype": "float32"}, {"name": "725", "dtype": "float32"}, {"name": "726", "dtype": "float32"}, {"name": "727", "dtype": "float32"}, {"name": "728", "dtype": "float32"}, {"name": "729", "dtype": "float32"}, {"name": "730", "dtype": "float32"}, {"name": "731", "dtype": "float32"}, {"name": "732", "dtype": "float32"}, {"name": "733", "dtype": "float32"}, {"name": "734", "dtype": "float32"}, {"name": "735", "dtype": "float32"}, {"name": "736", "dtype": "float32"}, {"name": "737", "dtype": "float32"}, {"name": "738", "dtype": "float32"}, {"name": "739", "dtype": "float32"}, {"name": "740", "dtype": "float32"}, {"name": "741", "dtype": "float32"}, {"name": "742", "dtype": "float32"}, {"name": "743", "dtype": "float32"}, {"name": "744", "dtype": "float32"}, {"name": "745", "dtype": "float32"}, {"name": "746", "dtype": "float32"}, {"name": "747", "dtype": "float32"}, {"name": "748", "dtype": "float32"}, {"name": "749", "dtype": "float32"}, {"name": "750", "dtype": "float32"}, {"name": "751", "dtype": "float32"}, {"name": "752", "dtype": "float32"}, {"name": "753", "dtype": "float32"}, {"name": "754", "dtype": "float32"}, {"name": "755", "dtype": "float32"}, {"name": "756", "dtype": "float32"}, {"name": "757", "dtype": "float32"}, {"name": "758", "dtype": "float32"}, {"name": "759", "dtype": "float32"}, {"name": "760", "dtype": "float32"}, {"name": "761", "dtype": "float32"}, {"name": "762", "dtype": "float32"}, {"name": "763", "dtype": "float32"}, {"name": "764", "dtype": "float32"}, {"name": "765", "dtype": "float32"}, {"name": 
"766", "dtype": "float32"}, {"name": "767", "dtype": "float32"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 115582709.0625, "num_examples": 37500}, {"name": "test", "num_bytes": 38527570.0, "num_examples": 12500}], "download_size": 211873362, "dataset_size": 154110279.0625}}
2023-08-18T14:27:40+00:00
[]
[]
TAGS #region-us
# Dataset Card for "BGL_GPT2_Baseline" More Information needed
[ "# Dataset Card for \"BGL_GPT2_Baseline\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"BGL_GPT2_Baseline\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"BGL_GPT2_Baseline\"\n\nMore Information needed" ]
86dfced3abcacc3c9b5539273f98c6984514653a
# Dataset of doremy_sweet/ドレミー・スイート/도레미스위트 (Touhou)

This is the dataset of doremy_sweet/ドレミー・スイート/도레미스위트 (Touhou), containing 500 images and their tags.

The core tags of this character are `hat, short_hair, blue_hair, blue_eyes, red_headwear, tail, bangs, tapir_tail`, which are pruned in this dataset.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)).

## List of Packages

| Name | Images | Size | Download | Type | Description |
|:-----------------|---------:|:-----------|:---------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------|
| raw | 500 | 611.23 MiB | [Download](https://huggingface.co/datasets/CyberHarem/doremy_sweet_touhou/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). |
| 800 | 500 | 340.99 MiB | [Download](https://huggingface.co/datasets/CyberHarem/doremy_sweet_touhou/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. |
| stage3-p480-800 | 1128 | 697.95 MiB | [Download](https://huggingface.co/datasets/CyberHarem/doremy_sweet_touhou/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
| 1200 | 500 | 533.73 MiB | [Download](https://huggingface.co/datasets/CyberHarem/doremy_sweet_touhou/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. |
| stage3-p480-1200 | 1128 | 989.63 MiB | [Download](https://huggingface.co/datasets/CyberHarem/doremy_sweet_touhou/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |

### Load Raw Dataset with Waifuc

We provide the raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need it, just run the following code:

```python
import os
import zipfile

from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource

# download raw archive file
zip_file = hf_hub_download(
    repo_id='CyberHarem/doremy_sweet_touhou',
    repo_type='dataset',
    filename='dataset-raw.zip',
)

# extract files to your directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)

# load the dataset with waifuc
source = LocalSource(dataset_dir)
for item in source:
    print(item.image, item.meta['filename'], item.meta['tags'])
```

## List of Clusters

List of tag clustering results; maybe some outfits can be mined here.
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 20 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, nightcap, pom_pom_(clothes), solo, black_capelet, looking_at_viewer, simple_background, smile, white_dress, white_background, blush, upper_body, closed_mouth, :3 | | 1 | 28 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, nightcap, pom_pom_(clothes), smile, solo, white_dress, black_capelet, multicolored_dress, looking_at_viewer, holding_book, dream_soul, black_dress, simple_background, full_body, white_background, open_mouth, white_footwear, blob, closed_mouth, white_socks | | 2 | 7 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, blob, dream_soul, dress, nightcap, pom_pom_(clothes), smile, solo, book, looking_at_viewer, open_mouth | | 3 | 8 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, blob, dream_soul, dress, nightcap, pom_pom_(clothes), simple_background, solo, white_background, book, smile, looking_at_viewer, short_sleeves | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | nightcap | pom_pom_(clothes) | solo | black_capelet | looking_at_viewer | simple_background | smile | white_dress | white_background | blush | upper_body | closed_mouth | :3 | multicolored_dress | holding_book | dream_soul | black_dress | full_body | open_mouth | white_footwear | blob | white_socks | dress | book | short_sleeves | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:-----------|:--------------------|:-------|:----------------|:--------------------|:--------------------|:--------|:--------------|:-------------------|:--------|:-------------|:---------------|:-----|:---------------------|:---------------|:-------------|:--------------|:------------|:-------------|:-----------------|:-------|:--------------|:--------|:-------|:----------------| | 0 | 20 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | 1 | 28 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | X | X | X | X | X | X | X | | | X | | X | X | X | X | X | X | X | X | X | | | | | 2 | 7 | ![](samples/2/clu2-sample0.png) | 
![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | X | X | X | | X | | X | | | | | | | | | X | | | X | | X | | X | X | | | 3 | 8 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | X | X | X | | X | X | X | | X | | | | | | | X | | | | | X | | X | X | X |
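For the IMG+TXT packages listed in the table above, here is a minimal sketch of downloading one and iterating over image/tag pairs. The pairing convention (one `<stem>.txt` of tags next to each image) is an assumption inferred from the package type, not something the card documents:

```python
# Minimal sketch: fetch the 800px IMG+TXT package and pair each image with
# its tag file. The one-<stem>.txt-per-image layout is assumed, not documented.
import os
import zipfile

from huggingface_hub import hf_hub_download
from PIL import Image

zip_file = hf_hub_download(
    repo_id='CyberHarem/doremy_sweet_touhou',
    repo_type='dataset',
    filename='dataset-800.zip',
)

dataset_dir = 'dataset_800'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)

for name in sorted(os.listdir(dataset_dir)):
    stem, ext = os.path.splitext(name)
    if ext.lower() not in {'.png', '.jpg', '.jpeg', '.webp'}:
        continue
    txt_path = os.path.join(dataset_dir, stem + '.txt')
    tags = ''
    if os.path.exists(txt_path):
        with open(txt_path, encoding='utf-8') as f:
            tags = f.read().strip()
    with Image.open(os.path.join(dataset_dir, name)) as image:
        print(name, image.size, tags[:60])
```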
CyberHarem/doremy_sweet_touhou
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-18T14:03:49+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-14T21:04:51+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of doremy\_sweet/ドレミー・スイート/도레미스위트 (Touhou) ================================================== This is the dataset of doremy\_sweet/ドレミー・スイート/도레미스위트 (Touhou), containing 500 images and their tags. The core tags of this character are 'hat, short\_hair, blue\_hair, blue\_eyes, red\_headwear, tail, bangs, tapir\_tail', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code List of Clusters ---------------- List of tag clustering result, maybe some outfits can be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ 44, 61, 5, 4 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.### Raw Text Version### Table Version" ]
a8569e1504bd818714cdcabdff668557ed269803
# Note

Processed data from [Chinese-medical-dialogue-data](https://github.com/Toyhom/Chinese-medical-dialogue-data): single-turn doctor-patient dialogues.

## Raw data samples

|department|title|ask|answer|
|----------|-----|---|------|
|心血管科|高血压患者能吃党参吗?|我有高血压这两天女婿来的时候给我拿了些党参泡水喝,您好高血压可以吃党参吗?|高血压病人可以口服党参的。党参有降血脂,降血压的作用,可以彻底消除血液中的垃圾,从而对冠心病以及心血管疾病的患者都有一定的稳定预防工作作用,因此平时口服党参能远离三高的危害。另外党参除了益气养血,降低中枢神经作用,调整消化系统功能,健脾补肺的功能。感谢您的进行咨询,期望我的解释对你有所帮助。|
|内分泌科|糖尿病还会进行遗传吗?|糖尿病有隔代遗传吗?我妈是糖尿病,很多年了,也没养好,我现在也是,我妹子也是,我儿子现在二十岁,没什么问题,但是以后会不会也得糖尿病啊,真是难过,我现在就已经开始让他控制点吃东西。|2型糖尿病的隔代遗传概率为父母患糖尿病,临产的发生率为40%,比一般人患糖尿病,疾病,如何更重要的选择因素基于生活方式的,后天也隔代遗传隔代遗传易感性更公正,增强患糖尿病的风险,低糖低脂肪,平时清淡饮食,适当锻练,增强监测数据,血糖仪买个备取。|
|内分泌科|糖尿病会出现什么症状?|我是不是糖尿病,如何严重,糖尿病的典型症状有哪些?血糖高之后感觉什么东西都不能够吃了,有糖分的东西都不敢吃,怕血糖又高,不知晓是不是变严重了,糖尿病的症状有哪些?|你好,根据你描述的情况看来糖尿病是可以致使血糖异常下降的,可以再次出现三多一少的症状,如喝水多,小便多,饭量大,体重减轻,建议你尽快复诊当地医院内分泌科看一看,需要有让大夫仔细检查你的血糖水平,明确有否糖尿病的情况,及时动用降糖药治疗,平时一定少吃甜食,足量锻练。|

## Processed data sample

```json
[
    {"instruction": "title", "input": "ask", "output": "answer", "history": null}
]
```
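A minimal sketch of the raw-to-processed conversion described above; the CSV path and encoding are illustrative assumptions (check the upstream repository for the actual file names and encodings):

```python
# Minimal sketch (hypothetical file name and encoding): map each raw row
# (department, title, ask, answer) to the processed instruction format.
import json

import pandas as pd

df = pd.read_csv('raw_dialogue.csv', encoding='gb18030', on_bad_lines='skip')

records = [
    {
        'instruction': row['title'],
        'input': row['ask'],
        'output': row['answer'],
        'history': None,  # serialized as JSON null below
    }
    for _, row in df.iterrows()
]

with open('chinese_medical_dialogue.json', 'w', encoding='utf-8') as f:
    json.dump(records, f, ensure_ascii=False, indent=2)
```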
ticoAg/Chinese-medical-dialogue
[ "license:apache-2.0", "region:us" ]
2023-08-18T14:25:05+00:00
{"license": "apache-2.0", "raw csv": "356 MB", "examples": 799743}
2023-08-18T14:33:15+00:00
[]
[]
TAGS #license-apache-2.0 #region-us
Note ==== Processed data from Chinese-medical-dialogue-data: single-turn doctor-patient dialogues. Raw data samples ---------------- Processed data sample ---------------------
[]
[ "TAGS\n#license-apache-2.0 #region-us \n" ]
[ 14 ]
[ "passage: TAGS\n#license-apache-2.0 #region-us \n" ]
0d68b25513cce7b863b37292106b7510a947e96b
# Dataset of seiran/清蘭/세이란 (Touhou)

This is the dataset of seiran/清蘭/세이란 (Touhou), containing 500 images and their tags.

The core tags of this character are `animal_ears, rabbit_ears, blue_hair, red_eyes, long_hair, bangs, twintails`, which are pruned in this dataset.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)).

## List of Packages

| Name | Images | Size | Download | Type | Description |
|:-----------------|---------:|:-----------|:---------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------|
| raw | 500 | 582.33 MiB | [Download](https://huggingface.co/datasets/CyberHarem/seiran_touhou/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). |
| 800 | 500 | 350.35 MiB | [Download](https://huggingface.co/datasets/CyberHarem/seiran_touhou/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. |
| stage3-p480-800 | 1189 | 751.51 MiB | [Download](https://huggingface.co/datasets/CyberHarem/seiran_touhou/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
| 1200 | 500 | 526.77 MiB | [Download](https://huggingface.co/datasets/CyberHarem/seiran_touhou/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. |
| stage3-p480-1200 | 1189 | 1.01 GiB | [Download](https://huggingface.co/datasets/CyberHarem/seiran_touhou/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |

### Load Raw Dataset with Waifuc

We provide the raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need it, just run the following code:

```python
import os
import zipfile

from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource

# download raw archive file
zip_file = hf_hub_download(
    repo_id='CyberHarem/seiran_touhou',
    repo_type='dataset',
    filename='dataset-raw.zip',
)

# extract files to your directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)

# load the dataset with waifuc
source = LocalSource(dataset_dir)
for item in source:
    print(item.image, item.meta['filename'], item.meta['tags'])
```

## List of Clusters

List of tag clustering results; maybe some outfits can be mined here.
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 5 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, blue_dress, crescent_print, earclip, frills, holding, kine, puffy_short_sleeves, solo, star_print, white_background, blush, closed_mouth, looking_at_viewer, smile, simple_background | | 1 | 6 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, blue_dress, crescent_print, earclip, frills, holding, kine, open_mouth, puffy_short_sleeves, smile, solo, star_print, blush, hair_between_eyes, one-hour_drawing_challenge, rabbit_tail | | 2 | 6 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, blue_dress, blush, hair_between_eyes, puffy_short_sleeves, simple_background, solo, white_background, looking_at_viewer, open_mouth, blue_skirt, upper_body | | 3 | 11 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, looking_at_viewer, solo, playboy_bunny, rabbit_tail, blush, wrist_cuffs, bare_shoulders, alternate_costume, bowtie, detached_collar, earclip, simple_background, covered_navel, white_background, ass, black_pantyhose, blue_leotard, closed_mouth, large_breasts, low_twintails, open_mouth, smile, standing | | 4 | 17 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | white_shirt, collared_shirt, red_necktie, long_sleeves, 1girl, looking_at_viewer, black_jacket, blazer, pink_skirt, solo, smile, hair_between_eyes, pleated_skirt, closed_mouth, purple_hair, standing, blush, crescent_pin, multiple_girls, open_mouth | | 5 | 7 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | 1girl, blush, hetero, 1boy, nipples, open_mouth, penis, solo_focus, nude, pussy, small_breasts, earclip, mosaic_censoring, navel, sex, tears, blue_dress, cum, looking_at_viewer, star_print, vaginal | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | blue_dress | crescent_print | earclip | frills | holding | kine | puffy_short_sleeves | solo | star_print | white_background | blush | closed_mouth | looking_at_viewer | smile | simple_background | open_mouth | hair_between_eyes | one-hour_drawing_challenge | rabbit_tail | blue_skirt | upper_body | playboy_bunny | wrist_cuffs | bare_shoulders | alternate_costume | bowtie | detached_collar | covered_navel | ass | black_pantyhose | blue_leotard | large_breasts | low_twintails | standing | 
white_shirt | collared_shirt | red_necktie | long_sleeves | black_jacket | blazer | pink_skirt | pleated_skirt | purple_hair | crescent_pin | multiple_girls | hetero | 1boy | nipples | penis | solo_focus | nude | pussy | small_breasts | mosaic_censoring | navel | sex | tears | cum | vaginal | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:-------------|:-----------------|:----------|:---------|:----------|:-------|:----------------------|:-------|:-------------|:-------------------|:--------|:---------------|:--------------------|:--------|:--------------------|:-------------|:--------------------|:-----------------------------|:--------------|:-------------|:-------------|:----------------|:--------------|:-----------------|:--------------------|:---------|:------------------|:----------------|:------|:------------------|:---------------|:----------------|:----------------|:-----------|:--------------|:-----------------|:--------------|:---------------|:---------------|:---------|:-------------|:----------------|:--------------|:---------------|:-----------------|:---------|:-------|:----------|:--------|:-------------|:-------|:--------|:----------------|:-------------------|:--------|:------|:--------|:------|:----------| | 0 | 5 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 1 | 6 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | X | X | X | X | X | X | X | | X | | | X | | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 2 | 6 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | X | | | | | | X | X | | X | X | | X | | X | X | X | | | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 3 | 11 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | | | X | | | | | X | | X | X | X | X | X | X | X | | | X | | | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | 4 | 17 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | | | | | | | | X | | | X | X | X | X | | X | X | | | | | | | | | | | | | | | | | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | 5 | 7 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | X | X | | X | | | | | | X | | X | | X | | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | X | X | X | X | X | X | X | X | X | X | X | X | X | X |
CyberHarem/seiran_touhou
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-18T14:25:24+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-14T20:59:08+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of seiran/清蘭/세이란 (Touhou) ================================= This is the dataset of seiran/清蘭/세이란 (Touhou), containing 500 images and their tags. The core tags of this character are 'animal\_ears, rabbit\_ears, blue\_hair, red\_eyes, long\_hair, bangs, twintails', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code List of Clusters ---------------- List of tag clustering result, maybe some outfits can be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ 44, 61, 5, 4 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.### Raw Text Version### Table Version" ]
291122ca64fb9ba03413c1535bc3bc88246da00d
# Dataset Card for "llama2-politosphere-fine-tuning" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
OneFly7/llama2-politosphere-fine-tuning
[ "region:us" ]
2023-08-18T14:32:40+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}, {"name": "label_text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 24345, "num_examples": 113}, {"name": "validation", "num_bytes": 22093, "num_examples": 113}], "download_size": 26501, "dataset_size": 46438}}
2023-08-20T06:51:25+00:00
[]
[]
TAGS #region-us
# Dataset Card for "llama2-politosphere-fine-tuning" More Information needed
[ "# Dataset Card for \"llama2-politosphere-fine-tuning\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"llama2-politosphere-fine-tuning\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"llama2-politosphere-fine-tuning\"\n\nMore Information needed" ]
caeafcd0896d9b3a1d1086ff80aeff3214c41356
# Dataset Card for ChatMed

## Dataset Description

- **Homepage:** https://huggingface.co/datasets/michaelwzhu/ChatMed-Datasets
- **Repository:** https://github.com/michael-wzhu/ChatMed
- **Paper:**
- **Leaderboard:**
- **Point of Contact:** Wei Zhu

### Dataset Summary

ChatMed-Dataset is a dataset of 110,113 medical query-response pairs (in Chinese) generated by OpenAI's `GPT-3.5` engine. The queries are crawled from several online medical consultation sites, reflecting the medical needs in the real world. The responses are generated by the OpenAI engine. This dataset is designed to inject medical knowledge into Chinese large language models.

The dataset is growing rapidly. Stay tuned for updates!

In the era of large models, models tend to converge: following the "open-sourcing" of LLaMA, the open-source community has produced a whole series of open models such as stanford-alpaca. Chinese vertical-domain AGI datasets, however, remain relatively scarce.

The queries (i.e. prompts) in ChatMed-Dataset come from online medical consultation questions (110,113 of them), reflecting the real-world consultation needs of different users/patients. At present, all responses are generated by the OpenAI `GPT-3.5` engine. Going forward, we will screen and curate the doctors' and patients' answers found on the web, keeping the best ones to build a higher-quality dataset.

### Supported Tasks and Leaderboards

The ChatMed-Dataset is designed for fine-tuning pretrained language models to perform better in automatic medical consultations.

### Languages

The data in ChatMed-Dataset are in Chinese (zh).

## Dataset Structure

### Data Instances

The dataset is in JSON-lines format and is very convenient to use! An example in the ChatMed-Dataset looks as follows:

```json
{
    "query": "你好,视神经萎缩,黄斑变性。怎样缓解,及日常注意事项。谢谢。",
    "response": "视神经萎缩和黄斑变性是两种不同的疾病,需要分别进行治疗。\n\n视神经萎缩是指视神经逐渐受损,导致视力逐渐减退,常见的治疗方法包括:\n\n1.药物治疗:如促进视神经细胞再生的药物,改善视网膜供血的药物等。\n\n2.手术治疗:如视神经萎缩的手术治疗可以通过神经移植、修复和再生等手术方式来缓解症状。\n\n3.康复训练:通过视力训练等康复训练可以帮助患者恢复视力。\n\n黄斑变性则是指黄斑区域受损,导致中央视力减退,常见的治疗方法包括:\n\n1.药物治疗:如抗氧化剂、维生素等可以缓解症状。\n\n2.手术治疗:如黄斑脱离手术等可以恢复视力。\n\n3.日常注意事项:保持良好的生活习惯,如戒烟、控制血压、血糖等,避免暴晒和过度用眼,保持充足的睡眠和均衡的饮食等。\n\n需要根据具体情况选择合适的治疗方案,建议您咨询专业医生进行诊断和治疗。"
}
```

### Data Fields

The data fields are as follows:

* `query`: real-life medical query from the web.
* `response`: response generated by the OpenAI `GPT-3.5` engine.

### Data Splits

|                 | train   |
|-----------------|--------:|
| ChatMed-Dataset | 110,113 |

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

> We believe that releasing the above assets will enable the academic community to perform controlled scientific studies on how large language models respond to real-life medical queries.
> More social impacts will be discussed soon!

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

The `ChatMed-Dataset` data is generated by a language model (`GPT-3.5`) and inevitably contains some errors or biases. We encourage users to use this data with caution and propose new methods to filter or improve the imperfections.

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

The dataset is available under the [Creative Commons NonCommercial (CC BY-NC 4.0)](https://creativecommons.org/licenses/by-nc/4.0/legalcode).
### Citation Information

```
@misc{chatmed,
  author = {Wei Zhu},
  title = {ChatMed-Dataset: A GPT-generated medical query-response dataset for medical large language models},
  year = {2023},
  publisher = {GitHub},
  journal = {GitHub repository},
  howpublished = {\url{https://github.com/michael-wzhu/ChatMed}},
}
```

### Contributions

[More Information Needed]
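Since the card describes the data as JSON lines with `query`/`response` fields, a minimal loading sketch follows; the local filename is a placeholder, as the card does not name the data file:

```python
# Minimal sketch: load the query/response pairs from a local copy.
# "chatmed.jsonl" is a placeholder; substitute the actual file from the repo.
from datasets import load_dataset

ds = load_dataset("json", data_files="chatmed.jsonl", split="train")

print(ds[0]["query"])
print(ds[0]["response"][:80])
```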
ticoAg/ChatMed_Consult_Dataset
[ "task_categories:text-generation", "language:zh", "license:cc-by-4.0", "medical consultation", "finetuning", "region:us" ]
2023-08-18T14:36:29+00:00
{"language": ["zh"], "license": "cc-by-4.0", "task_categories": ["text-generation"], "pretty_name": "ChatMed-Dataset", "tags": ["medical consultation", "finetuning"]}
2023-08-18T14:39:11+00:00
[]
[ "zh" ]
TAGS #task_categories-text-generation #language-Chinese #license-cc-by-4.0 #medical consultation #finetuning #region-us
Dataset Card for ChatMed ======================== Dataset Description ------------------- * Homepage: URL * Repository: URL * Paper: * Leaderboard: * Point of Contact: Wei Zhu ### Dataset Summary ChatMed-Dataset is a dataset of 110,113 medical query-response pairs (in Chinese) generated by OpenAI's 'GPT-3.5' engine. The queries are crawled from several online medical consultation sites, reflecting the medical needs in the real world. The responses are generated by the OpenAI engine. This dataset is designed to inject medical knowledge into Chinese large language models. The dataset is growing rapidly. Stay tuned for updates! In the era of large models, models tend to converge: following the "open-sourcing" of LLaMA, the open-source community has produced a whole series of open models such as stanford-alpaca. Chinese vertical-domain AGI datasets, however, remain relatively scarce. The queries (i.e. prompts) in ChatMed-Dataset come from online medical consultation questions (110,113 of them), reflecting the real-world consultation needs of different users/patients. At present, all responses are generated by the OpenAI 'GPT-3.5' engine. Going forward, we will screen and curate the doctors' and patients' answers found on the web, keeping the best ones to build a higher-quality dataset. ### Supported Tasks and Leaderboards The ChatMed-Dataset is designed for fine-tuning pretrained language models to perform better in automatic medical consultations. ### Languages The data in ChatMed-Dataset are in Chinese (zh). Dataset Structure ----------------- ### Data Instances The dataset is in JSON-lines format and is very convenient to use! An example in the ChatMed-Dataset looks as follows: ### Data Fields The data fields are as follows: * 'query': real-life medical query from the web. * 'response': response generated by the OpenAI 'GPT-3.5' engine. ### Data Splits Dataset Creation ---------------- ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information Considerations for Using the Data --------------------------------- ### Social Impact of Dataset > We believe that releasing the above assets will enable the academic community to perform controlled scientific studies on how large language models respond to real-life medical queries. > More social impacts will be discussed soon! ### Discussion of Biases ### Other Known Limitations The 'ChatMed-Dataset' data is generated by a language model ('GPT-3.5') and inevitably contains some errors or biases. We encourage users to use this data with caution and propose new methods to filter or improve the imperfections. Additional Information ---------------------- ### Dataset Curators ### Licensing Information The dataset is available under the Creative Commons NonCommercial (CC BY-NC 4.0). ### Contributions
[ "### Dataset Summary\n\n\nChatMed-Dataset is a dataset of 110,113 medical query-response pairs (in Chinese) generated by OpenAI's 'GPT-3.5' engine. The queries are crawled from several online medical consultation sites, reflecting the medical needs in the real world. The responses are generated by the OpenAI engine. This dataset is designated to to inject medical knowledge into Chinese large language models.\n\n\nThe dataset size growing rapidly. Stay tuned for updates!\n\n\n在大模型时代,模型都是趋同的,比如开源社区由于LlaMa的\"开源\",出现了各种stanford-alpaca等一系列的开源模型。但是中文垂直领域上的AGI数据集相对匮乏。\n\n\n本数据集, ChatMed-Dataset, 中的query(或者是prompt)来自于互联网上的医疗问诊问题(110,113),反映了真实世界的不同用户/患者的医疗问诊需求。目前response都是由OpenAI 'GPT-3.5'引擎回答的。我们后续会对互联网上的医生回答与患者回答进行筛选甄别,择优选择,构建质量更优的数据集。", "### Supported Tasks and Leaderboards\n\n\nThe ChatMed-Dataset designed for fine-tuning pretrained language models to perform better in automatic medical consultations.", "### Languages\n\n\nThe data in ChatMed-Dataset are in Chinese (zh).\n\n\nDataset Structure\n-----------------", "### Data Instances\n\n\nThe dataset is in json-line format and is very convenient to use! An example in the ChatMed-Dataset looks as follows:", "### Data Fields\n\n\nThe data fields are as follows:\n\n\n* 'query': real-life medical query from the web.\n* 'response': response generated by the OpenAI 'GPT-3.5' engine.", "### Data Splits\n\n\n\nDataset Creation\n----------------", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information\n\n\nConsiderations for Using the Data\n---------------------------------", "### Social Impact of Dataset\n\n\n\n> \n> We believe that releasing the above assets will enable the academic community to perform controlled scientific studies on how large language models responses to real-life medical query.\n> More social impacts will be discussed soon!\n> \n> \n>", "### Discussion of Biases", "### Other Known Limitations\n\n\nThe 'ChatMed-Dataset' data is generated by a language model ('GPT-3.5') and inevitably contains some errors or biases. We encourage users to use this data with caution and propose new methods to filter or improve the imperfections.\n\n\nAdditional Information\n----------------------", "### Dataset Curators", "### Licensing Information\n\n\nThe dataset is available under the Creative Commons NonCommercial (CC BY-NC 4.0).", "### Contributions" ]
[ "TAGS\n#task_categories-text-generation #language-Chinese #license-cc-by-4.0 #medical consultation #finetuning #region-us \n", "### Dataset Summary\n\n\nChatMed-Dataset is a dataset of 110,113 medical query-response pairs (in Chinese) generated by OpenAI's 'GPT-3.5' engine. The queries are crawled from several online medical consultation sites, reflecting the medical needs in the real world. The responses are generated by the OpenAI engine. This dataset is designated to to inject medical knowledge into Chinese large language models.\n\n\nThe dataset size growing rapidly. Stay tuned for updates!\n\n\n在大模型时代,模型都是趋同的,比如开源社区由于LlaMa的\"开源\",出现了各种stanford-alpaca等一系列的开源模型。但是中文垂直领域上的AGI数据集相对匮乏。\n\n\n本数据集, ChatMed-Dataset, 中的query(或者是prompt)来自于互联网上的医疗问诊问题(110,113),反映了真实世界的不同用户/患者的医疗问诊需求。目前response都是由OpenAI 'GPT-3.5'引擎回答的。我们后续会对互联网上的医生回答与患者回答进行筛选甄别,择优选择,构建质量更优的数据集。", "### Supported Tasks and Leaderboards\n\n\nThe ChatMed-Dataset designed for fine-tuning pretrained language models to perform better in automatic medical consultations.", "### Languages\n\n\nThe data in ChatMed-Dataset are in Chinese (zh).\n\n\nDataset Structure\n-----------------", "### Data Instances\n\n\nThe dataset is in json-line format and is very convenient to use! An example in the ChatMed-Dataset looks as follows:", "### Data Fields\n\n\nThe data fields are as follows:\n\n\n* 'query': real-life medical query from the web.\n* 'response': response generated by the OpenAI 'GPT-3.5' engine.", "### Data Splits\n\n\n\nDataset Creation\n----------------", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information\n\n\nConsiderations for Using the Data\n---------------------------------", "### Social Impact of Dataset\n\n\n\n> \n> We believe that releasing the above assets will enable the academic community to perform controlled scientific studies on how large language models responses to real-life medical query.\n> More social impacts will be discussed soon!\n> \n> \n>", "### Discussion of Biases", "### Other Known Limitations\n\n\nThe 'ChatMed-Dataset' data is generated by a language model ('GPT-3.5') and inevitably contains some errors or biases. We encourage users to use this data with caution and propose new methods to filter or improve the imperfections.\n\n\nAdditional Information\n----------------------", "### Dataset Curators", "### Licensing Information\n\n\nThe dataset is available under the Creative Commons NonCommercial (CC BY-NC 4.0).", "### Contributions" ]
[ 39, 254, 36, 25, 37, 51, 11, 7, 4, 10, 10, 5, 5, 9, 18, 59, 8, 74, 6, 26, 5 ]
[ "passage: TAGS\n#task_categories-text-generation #language-Chinese #license-cc-by-4.0 #medical consultation #finetuning #region-us \n### Dataset Summary\n\n\nChatMed-Dataset is a dataset of 110,113 medical query-response pairs (in Chinese) generated by OpenAI's 'GPT-3.5' engine. The queries are crawled from several online medical consultation sites, reflecting the medical needs in the real world. The responses are generated by the OpenAI engine. This dataset is designated to to inject medical knowledge into Chinese large language models.\n\n\nThe dataset size growing rapidly. Stay tuned for updates!\n\n\n在大模型时代,模型都是趋同的,比如开源社区由于LlaMa的\"开源\",出现了各种stanford-alpaca等一系列的开源模型。但是中文垂直领域上的AGI数据集相对匮乏。\n\n\n本数据集, ChatMed-Dataset, 中的query(或者是prompt)来自于互联网上的医疗问诊问题(110,113),反映了真实世界的不同用户/患者的医疗问诊需求。目前response都是由OpenAI 'GPT-3.5'引擎回答的。我们后续会对互联网上的医生回答与患者回答进行筛选甄别,择优选择,构建质量更优的数据集。### Supported Tasks and Leaderboards\n\n\nThe ChatMed-Dataset designed for fine-tuning pretrained language models to perform better in automatic medical consultations.### Languages\n\n\nThe data in ChatMed-Dataset are in Chinese (zh).\n\n\nDataset Structure\n-----------------### Data Instances\n\n\nThe dataset is in json-line format and is very convenient to use! An example in the ChatMed-Dataset looks as follows:### Data Fields\n\n\nThe data fields are as follows:\n\n\n* 'query': real-life medical query from the web.\n* 'response': response generated by the OpenAI 'GPT-3.5' engine.### Data Splits\n\n\n\nDataset Creation\n----------------### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?" ]
be8c99cbb373dd09d68cb4857e97b1e38c9d0930
# Dataset Card for "BGL_GPTNEO_Baseline" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
EgilKarlsen/BGL_GPTNEO_Baseline
[ "region:us" ]
2023-08-18T14:45:21+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "0", "dtype": "float32"}, {"name": "1", "dtype": "float32"}, {"name": "2", "dtype": "float32"}, {"name": "3", "dtype": "float32"}, {"name": "4", "dtype": "float32"}, {"name": "5", "dtype": "float32"}, {"name": "6", "dtype": "float32"}, {"name": "7", "dtype": "float32"}, {"name": "8", "dtype": "float32"}, {"name": "9", "dtype": "float32"}, {"name": "10", "dtype": "float32"}, {"name": "11", "dtype": "float32"}, {"name": "12", "dtype": "float32"}, {"name": "13", "dtype": "float32"}, {"name": "14", "dtype": "float32"}, {"name": "15", "dtype": "float32"}, {"name": "16", "dtype": "float32"}, {"name": "17", "dtype": "float32"}, {"name": "18", "dtype": "float32"}, {"name": "19", "dtype": "float32"}, {"name": "20", "dtype": "float32"}, {"name": "21", "dtype": "float32"}, {"name": "22", "dtype": "float32"}, {"name": "23", "dtype": "float32"}, {"name": "24", "dtype": "float32"}, {"name": "25", "dtype": "float32"}, {"name": "26", "dtype": "float32"}, {"name": "27", "dtype": "float32"}, {"name": "28", "dtype": "float32"}, {"name": "29", "dtype": "float32"}, {"name": "30", "dtype": "float32"}, {"name": "31", "dtype": "float32"}, {"name": "32", "dtype": "float32"}, {"name": "33", "dtype": "float32"}, {"name": "34", "dtype": "float32"}, {"name": "35", "dtype": "float32"}, {"name": "36", "dtype": "float32"}, {"name": "37", "dtype": "float32"}, {"name": "38", "dtype": "float32"}, {"name": "39", "dtype": "float32"}, {"name": "40", "dtype": "float32"}, {"name": "41", "dtype": "float32"}, {"name": "42", "dtype": "float32"}, {"name": "43", "dtype": "float32"}, {"name": "44", "dtype": "float32"}, {"name": "45", "dtype": "float32"}, {"name": "46", "dtype": "float32"}, {"name": "47", "dtype": "float32"}, {"name": "48", "dtype": "float32"}, {"name": "49", "dtype": "float32"}, {"name": "50", "dtype": "float32"}, {"name": "51", "dtype": "float32"}, {"name": "52", "dtype": "float32"}, {"name": "53", "dtype": "float32"}, {"name": "54", "dtype": "float32"}, {"name": "55", "dtype": "float32"}, {"name": "56", "dtype": "float32"}, {"name": "57", "dtype": "float32"}, {"name": "58", "dtype": "float32"}, {"name": "59", "dtype": "float32"}, {"name": "60", "dtype": "float32"}, {"name": "61", "dtype": "float32"}, {"name": "62", "dtype": "float32"}, {"name": "63", "dtype": "float32"}, {"name": "64", "dtype": "float32"}, {"name": "65", "dtype": "float32"}, {"name": "66", "dtype": "float32"}, {"name": "67", "dtype": "float32"}, {"name": "68", "dtype": "float32"}, {"name": "69", "dtype": "float32"}, {"name": "70", "dtype": "float32"}, {"name": "71", "dtype": "float32"}, {"name": "72", "dtype": "float32"}, {"name": "73", "dtype": "float32"}, {"name": "74", "dtype": "float32"}, {"name": "75", "dtype": "float32"}, {"name": "76", "dtype": "float32"}, {"name": "77", "dtype": "float32"}, {"name": "78", "dtype": "float32"}, {"name": "79", "dtype": "float32"}, {"name": "80", "dtype": "float32"}, {"name": "81", "dtype": "float32"}, {"name": "82", "dtype": "float32"}, {"name": "83", "dtype": "float32"}, {"name": "84", "dtype": "float32"}, {"name": "85", "dtype": "float32"}, {"name": "86", "dtype": "float32"}, {"name": "87", "dtype": "float32"}, {"name": "88", "dtype": "float32"}, {"name": "89", "dtype": "float32"}, {"name": "90", "dtype": "float32"}, {"name": "91", "dtype": "float32"}, {"name": "92", "dtype": "float32"}, {"name": "93", "dtype": "float32"}, 
{"name": "94", "dtype": "float32"}, {"name": "95", "dtype": "float32"}, {"name": "96", "dtype": "float32"}, {"name": "97", "dtype": "float32"}, {"name": "98", "dtype": "float32"}, {"name": "99", "dtype": "float32"}, {"name": "100", "dtype": "float32"}, {"name": "101", "dtype": "float32"}, {"name": "102", "dtype": "float32"}, {"name": "103", "dtype": "float32"}, {"name": "104", "dtype": "float32"}, {"name": "105", "dtype": "float32"}, {"name": "106", "dtype": "float32"}, {"name": "107", "dtype": "float32"}, {"name": "108", "dtype": "float32"}, {"name": "109", "dtype": "float32"}, {"name": "110", "dtype": "float32"}, {"name": "111", "dtype": "float32"}, {"name": "112", "dtype": "float32"}, {"name": "113", "dtype": "float32"}, {"name": "114", "dtype": "float32"}, {"name": "115", "dtype": "float32"}, {"name": "116", "dtype": "float32"}, {"name": "117", "dtype": "float32"}, {"name": "118", "dtype": "float32"}, {"name": "119", "dtype": "float32"}, {"name": "120", "dtype": "float32"}, {"name": "121", "dtype": "float32"}, {"name": "122", "dtype": "float32"}, {"name": "123", "dtype": "float32"}, {"name": "124", "dtype": "float32"}, {"name": "125", "dtype": "float32"}, {"name": "126", "dtype": "float32"}, {"name": "127", "dtype": "float32"}, {"name": "128", "dtype": "float32"}, {"name": "129", "dtype": "float32"}, {"name": "130", "dtype": "float32"}, {"name": "131", "dtype": "float32"}, {"name": "132", "dtype": "float32"}, {"name": "133", "dtype": "float32"}, {"name": "134", "dtype": "float32"}, {"name": "135", "dtype": "float32"}, {"name": "136", "dtype": "float32"}, {"name": "137", "dtype": "float32"}, {"name": "138", "dtype": "float32"}, {"name": "139", "dtype": "float32"}, {"name": "140", "dtype": "float32"}, {"name": "141", "dtype": "float32"}, {"name": "142", "dtype": "float32"}, {"name": "143", "dtype": "float32"}, {"name": "144", "dtype": "float32"}, {"name": "145", "dtype": "float32"}, {"name": "146", "dtype": "float32"}, {"name": "147", "dtype": "float32"}, {"name": "148", "dtype": "float32"}, {"name": "149", "dtype": "float32"}, {"name": "150", "dtype": "float32"}, {"name": "151", "dtype": "float32"}, {"name": "152", "dtype": "float32"}, {"name": "153", "dtype": "float32"}, {"name": "154", "dtype": "float32"}, {"name": "155", "dtype": "float32"}, {"name": "156", "dtype": "float32"}, {"name": "157", "dtype": "float32"}, {"name": "158", "dtype": "float32"}, {"name": "159", "dtype": "float32"}, {"name": "160", "dtype": "float32"}, {"name": "161", "dtype": "float32"}, {"name": "162", "dtype": "float32"}, {"name": "163", "dtype": "float32"}, {"name": "164", "dtype": "float32"}, {"name": "165", "dtype": "float32"}, {"name": "166", "dtype": "float32"}, {"name": "167", "dtype": "float32"}, {"name": "168", "dtype": "float32"}, {"name": "169", "dtype": "float32"}, {"name": "170", "dtype": "float32"}, {"name": "171", "dtype": "float32"}, {"name": "172", "dtype": "float32"}, {"name": "173", "dtype": "float32"}, {"name": "174", "dtype": "float32"}, {"name": "175", "dtype": "float32"}, {"name": "176", "dtype": "float32"}, {"name": "177", "dtype": "float32"}, {"name": "178", "dtype": "float32"}, {"name": "179", "dtype": "float32"}, {"name": "180", "dtype": "float32"}, {"name": "181", "dtype": "float32"}, {"name": "182", "dtype": "float32"}, {"name": "183", "dtype": "float32"}, {"name": "184", "dtype": "float32"}, {"name": "185", "dtype": "float32"}, {"name": "186", "dtype": "float32"}, {"name": "187", "dtype": "float32"}, {"name": "188", "dtype": "float32"}, {"name": "189", "dtype": "float32"}, {"name": 
"190", "dtype": "float32"}, {"name": "191", "dtype": "float32"}, {"name": "192", "dtype": "float32"}, {"name": "193", "dtype": "float32"}, {"name": "194", "dtype": "float32"}, {"name": "195", "dtype": "float32"}, {"name": "196", "dtype": "float32"}, {"name": "197", "dtype": "float32"}, {"name": "198", "dtype": "float32"}, {"name": "199", "dtype": "float32"}, {"name": "200", "dtype": "float32"}, {"name": "201", "dtype": "float32"}, {"name": "202", "dtype": "float32"}, {"name": "203", "dtype": "float32"}, {"name": "204", "dtype": "float32"}, {"name": "205", "dtype": "float32"}, {"name": "206", "dtype": "float32"}, {"name": "207", "dtype": "float32"}, {"name": "208", "dtype": "float32"}, {"name": "209", "dtype": "float32"}, {"name": "210", "dtype": "float32"}, {"name": "211", "dtype": "float32"}, {"name": "212", "dtype": "float32"}, {"name": "213", "dtype": "float32"}, {"name": "214", "dtype": "float32"}, {"name": "215", "dtype": "float32"}, {"name": "216", "dtype": "float32"}, {"name": "217", "dtype": "float32"}, {"name": "218", "dtype": "float32"}, {"name": "219", "dtype": "float32"}, {"name": "220", "dtype": "float32"}, {"name": "221", "dtype": "float32"}, {"name": "222", "dtype": "float32"}, {"name": "223", "dtype": "float32"}, {"name": "224", "dtype": "float32"}, {"name": "225", "dtype": "float32"}, {"name": "226", "dtype": "float32"}, {"name": "227", "dtype": "float32"}, {"name": "228", "dtype": "float32"}, {"name": "229", "dtype": "float32"}, {"name": "230", "dtype": "float32"}, {"name": "231", "dtype": "float32"}, {"name": "232", "dtype": "float32"}, {"name": "233", "dtype": "float32"}, {"name": "234", "dtype": "float32"}, {"name": "235", "dtype": "float32"}, {"name": "236", "dtype": "float32"}, {"name": "237", "dtype": "float32"}, {"name": "238", "dtype": "float32"}, {"name": "239", "dtype": "float32"}, {"name": "240", "dtype": "float32"}, {"name": "241", "dtype": "float32"}, {"name": "242", "dtype": "float32"}, {"name": "243", "dtype": "float32"}, {"name": "244", "dtype": "float32"}, {"name": "245", "dtype": "float32"}, {"name": "246", "dtype": "float32"}, {"name": "247", "dtype": "float32"}, {"name": "248", "dtype": "float32"}, {"name": "249", "dtype": "float32"}, {"name": "250", "dtype": "float32"}, {"name": "251", "dtype": "float32"}, {"name": "252", "dtype": "float32"}, {"name": "253", "dtype": "float32"}, {"name": "254", "dtype": "float32"}, {"name": "255", "dtype": "float32"}, {"name": "256", "dtype": "float32"}, {"name": "257", "dtype": "float32"}, {"name": "258", "dtype": "float32"}, {"name": "259", "dtype": "float32"}, {"name": "260", "dtype": "float32"}, {"name": "261", "dtype": "float32"}, {"name": "262", "dtype": "float32"}, {"name": "263", "dtype": "float32"}, {"name": "264", "dtype": "float32"}, {"name": "265", "dtype": "float32"}, {"name": "266", "dtype": "float32"}, {"name": "267", "dtype": "float32"}, {"name": "268", "dtype": "float32"}, {"name": "269", "dtype": "float32"}, {"name": "270", "dtype": "float32"}, {"name": "271", "dtype": "float32"}, {"name": "272", "dtype": "float32"}, {"name": "273", "dtype": "float32"}, {"name": "274", "dtype": "float32"}, {"name": "275", "dtype": "float32"}, {"name": "276", "dtype": "float32"}, {"name": "277", "dtype": "float32"}, {"name": "278", "dtype": "float32"}, {"name": "279", "dtype": "float32"}, {"name": "280", "dtype": "float32"}, {"name": "281", "dtype": "float32"}, {"name": "282", "dtype": "float32"}, {"name": "283", "dtype": "float32"}, {"name": "284", "dtype": "float32"}, {"name": "285", "dtype": "float32"}, {"name": 
"286", "dtype": "float32"}, {"name": "287", "dtype": "float32"}, {"name": "288", "dtype": "float32"}, {"name": "289", "dtype": "float32"}, {"name": "290", "dtype": "float32"}, {"name": "291", "dtype": "float32"}, {"name": "292", "dtype": "float32"}, {"name": "293", "dtype": "float32"}, {"name": "294", "dtype": "float32"}, {"name": "295", "dtype": "float32"}, {"name": "296", "dtype": "float32"}, {"name": "297", "dtype": "float32"}, {"name": "298", "dtype": "float32"}, {"name": "299", "dtype": "float32"}, {"name": "300", "dtype": "float32"}, {"name": "301", "dtype": "float32"}, {"name": "302", "dtype": "float32"}, {"name": "303", "dtype": "float32"}, {"name": "304", "dtype": "float32"}, {"name": "305", "dtype": "float32"}, {"name": "306", "dtype": "float32"}, {"name": "307", "dtype": "float32"}, {"name": "308", "dtype": "float32"}, {"name": "309", "dtype": "float32"}, {"name": "310", "dtype": "float32"}, {"name": "311", "dtype": "float32"}, {"name": "312", "dtype": "float32"}, {"name": "313", "dtype": "float32"}, {"name": "314", "dtype": "float32"}, {"name": "315", "dtype": "float32"}, {"name": "316", "dtype": "float32"}, {"name": "317", "dtype": "float32"}, {"name": "318", "dtype": "float32"}, {"name": "319", "dtype": "float32"}, {"name": "320", "dtype": "float32"}, {"name": "321", "dtype": "float32"}, {"name": "322", "dtype": "float32"}, {"name": "323", "dtype": "float32"}, {"name": "324", "dtype": "float32"}, {"name": "325", "dtype": "float32"}, {"name": "326", "dtype": "float32"}, {"name": "327", "dtype": "float32"}, {"name": "328", "dtype": "float32"}, {"name": "329", "dtype": "float32"}, {"name": "330", "dtype": "float32"}, {"name": "331", "dtype": "float32"}, {"name": "332", "dtype": "float32"}, {"name": "333", "dtype": "float32"}, {"name": "334", "dtype": "float32"}, {"name": "335", "dtype": "float32"}, {"name": "336", "dtype": "float32"}, {"name": "337", "dtype": "float32"}, {"name": "338", "dtype": "float32"}, {"name": "339", "dtype": "float32"}, {"name": "340", "dtype": "float32"}, {"name": "341", "dtype": "float32"}, {"name": "342", "dtype": "float32"}, {"name": "343", "dtype": "float32"}, {"name": "344", "dtype": "float32"}, {"name": "345", "dtype": "float32"}, {"name": "346", "dtype": "float32"}, {"name": "347", "dtype": "float32"}, {"name": "348", "dtype": "float32"}, {"name": "349", "dtype": "float32"}, {"name": "350", "dtype": "float32"}, {"name": "351", "dtype": "float32"}, {"name": "352", "dtype": "float32"}, {"name": "353", "dtype": "float32"}, {"name": "354", "dtype": "float32"}, {"name": "355", "dtype": "float32"}, {"name": "356", "dtype": "float32"}, {"name": "357", "dtype": "float32"}, {"name": "358", "dtype": "float32"}, {"name": "359", "dtype": "float32"}, {"name": "360", "dtype": "float32"}, {"name": "361", "dtype": "float32"}, {"name": "362", "dtype": "float32"}, {"name": "363", "dtype": "float32"}, {"name": "364", "dtype": "float32"}, {"name": "365", "dtype": "float32"}, {"name": "366", "dtype": "float32"}, {"name": "367", "dtype": "float32"}, {"name": "368", "dtype": "float32"}, {"name": "369", "dtype": "float32"}, {"name": "370", "dtype": "float32"}, {"name": "371", "dtype": "float32"}, {"name": "372", "dtype": "float32"}, {"name": "373", "dtype": "float32"}, {"name": "374", "dtype": "float32"}, {"name": "375", "dtype": "float32"}, {"name": "376", "dtype": "float32"}, {"name": "377", "dtype": "float32"}, {"name": "378", "dtype": "float32"}, {"name": "379", "dtype": "float32"}, {"name": "380", "dtype": "float32"}, {"name": "381", "dtype": "float32"}, {"name": 
"382", "dtype": "float32"}, {"name": "383", "dtype": "float32"}, {"name": "384", "dtype": "float32"}, {"name": "385", "dtype": "float32"}, {"name": "386", "dtype": "float32"}, {"name": "387", "dtype": "float32"}, {"name": "388", "dtype": "float32"}, {"name": "389", "dtype": "float32"}, {"name": "390", "dtype": "float32"}, {"name": "391", "dtype": "float32"}, {"name": "392", "dtype": "float32"}, {"name": "393", "dtype": "float32"}, {"name": "394", "dtype": "float32"}, {"name": "395", "dtype": "float32"}, {"name": "396", "dtype": "float32"}, {"name": "397", "dtype": "float32"}, {"name": "398", "dtype": "float32"}, {"name": "399", "dtype": "float32"}, {"name": "400", "dtype": "float32"}, {"name": "401", "dtype": "float32"}, {"name": "402", "dtype": "float32"}, {"name": "403", "dtype": "float32"}, {"name": "404", "dtype": "float32"}, {"name": "405", "dtype": "float32"}, {"name": "406", "dtype": "float32"}, {"name": "407", "dtype": "float32"}, {"name": "408", "dtype": "float32"}, {"name": "409", "dtype": "float32"}, {"name": "410", "dtype": "float32"}, {"name": "411", "dtype": "float32"}, {"name": "412", "dtype": "float32"}, {"name": "413", "dtype": "float32"}, {"name": "414", "dtype": "float32"}, {"name": "415", "dtype": "float32"}, {"name": "416", "dtype": "float32"}, {"name": "417", "dtype": "float32"}, {"name": "418", "dtype": "float32"}, {"name": "419", "dtype": "float32"}, {"name": "420", "dtype": "float32"}, {"name": "421", "dtype": "float32"}, {"name": "422", "dtype": "float32"}, {"name": "423", "dtype": "float32"}, {"name": "424", "dtype": "float32"}, {"name": "425", "dtype": "float32"}, {"name": "426", "dtype": "float32"}, {"name": "427", "dtype": "float32"}, {"name": "428", "dtype": "float32"}, {"name": "429", "dtype": "float32"}, {"name": "430", "dtype": "float32"}, {"name": "431", "dtype": "float32"}, {"name": "432", "dtype": "float32"}, {"name": "433", "dtype": "float32"}, {"name": "434", "dtype": "float32"}, {"name": "435", "dtype": "float32"}, {"name": "436", "dtype": "float32"}, {"name": "437", "dtype": "float32"}, {"name": "438", "dtype": "float32"}, {"name": "439", "dtype": "float32"}, {"name": "440", "dtype": "float32"}, {"name": "441", "dtype": "float32"}, {"name": "442", "dtype": "float32"}, {"name": "443", "dtype": "float32"}, {"name": "444", "dtype": "float32"}, {"name": "445", "dtype": "float32"}, {"name": "446", "dtype": "float32"}, {"name": "447", "dtype": "float32"}, {"name": "448", "dtype": "float32"}, {"name": "449", "dtype": "float32"}, {"name": "450", "dtype": "float32"}, {"name": "451", "dtype": "float32"}, {"name": "452", "dtype": "float32"}, {"name": "453", "dtype": "float32"}, {"name": "454", "dtype": "float32"}, {"name": "455", "dtype": "float32"}, {"name": "456", "dtype": "float32"}, {"name": "457", "dtype": "float32"}, {"name": "458", "dtype": "float32"}, {"name": "459", "dtype": "float32"}, {"name": "460", "dtype": "float32"}, {"name": "461", "dtype": "float32"}, {"name": "462", "dtype": "float32"}, {"name": "463", "dtype": "float32"}, {"name": "464", "dtype": "float32"}, {"name": "465", "dtype": "float32"}, {"name": "466", "dtype": "float32"}, {"name": "467", "dtype": "float32"}, {"name": "468", "dtype": "float32"}, {"name": "469", "dtype": "float32"}, {"name": "470", "dtype": "float32"}, {"name": "471", "dtype": "float32"}, {"name": "472", "dtype": "float32"}, {"name": "473", "dtype": "float32"}, {"name": "474", "dtype": "float32"}, {"name": "475", "dtype": "float32"}, {"name": "476", "dtype": "float32"}, {"name": "477", "dtype": "float32"}, {"name": 
"478", "dtype": "float32"}, {"name": "479", "dtype": "float32"}, {"name": "480", "dtype": "float32"}, {"name": "481", "dtype": "float32"}, {"name": "482", "dtype": "float32"}, {"name": "483", "dtype": "float32"}, {"name": "484", "dtype": "float32"}, {"name": "485", "dtype": "float32"}, {"name": "486", "dtype": "float32"}, {"name": "487", "dtype": "float32"}, {"name": "488", "dtype": "float32"}, {"name": "489", "dtype": "float32"}, {"name": "490", "dtype": "float32"}, {"name": "491", "dtype": "float32"}, {"name": "492", "dtype": "float32"}, {"name": "493", "dtype": "float32"}, {"name": "494", "dtype": "float32"}, {"name": "495", "dtype": "float32"}, {"name": "496", "dtype": "float32"}, {"name": "497", "dtype": "float32"}, {"name": "498", "dtype": "float32"}, {"name": "499", "dtype": "float32"}, {"name": "500", "dtype": "float32"}, {"name": "501", "dtype": "float32"}, {"name": "502", "dtype": "float32"}, {"name": "503", "dtype": "float32"}, {"name": "504", "dtype": "float32"}, {"name": "505", "dtype": "float32"}, {"name": "506", "dtype": "float32"}, {"name": "507", "dtype": "float32"}, {"name": "508", "dtype": "float32"}, {"name": "509", "dtype": "float32"}, {"name": "510", "dtype": "float32"}, {"name": "511", "dtype": "float32"}, {"name": "512", "dtype": "float32"}, {"name": "513", "dtype": "float32"}, {"name": "514", "dtype": "float32"}, {"name": "515", "dtype": "float32"}, {"name": "516", "dtype": "float32"}, {"name": "517", "dtype": "float32"}, {"name": "518", "dtype": "float32"}, {"name": "519", "dtype": "float32"}, {"name": "520", "dtype": "float32"}, {"name": "521", "dtype": "float32"}, {"name": "522", "dtype": "float32"}, {"name": "523", "dtype": "float32"}, {"name": "524", "dtype": "float32"}, {"name": "525", "dtype": "float32"}, {"name": "526", "dtype": "float32"}, {"name": "527", "dtype": "float32"}, {"name": "528", "dtype": "float32"}, {"name": "529", "dtype": "float32"}, {"name": "530", "dtype": "float32"}, {"name": "531", "dtype": "float32"}, {"name": "532", "dtype": "float32"}, {"name": "533", "dtype": "float32"}, {"name": "534", "dtype": "float32"}, {"name": "535", "dtype": "float32"}, {"name": "536", "dtype": "float32"}, {"name": "537", "dtype": "float32"}, {"name": "538", "dtype": "float32"}, {"name": "539", "dtype": "float32"}, {"name": "540", "dtype": "float32"}, {"name": "541", "dtype": "float32"}, {"name": "542", "dtype": "float32"}, {"name": "543", "dtype": "float32"}, {"name": "544", "dtype": "float32"}, {"name": "545", "dtype": "float32"}, {"name": "546", "dtype": "float32"}, {"name": "547", "dtype": "float32"}, {"name": "548", "dtype": "float32"}, {"name": "549", "dtype": "float32"}, {"name": "550", "dtype": "float32"}, {"name": "551", "dtype": "float32"}, {"name": "552", "dtype": "float32"}, {"name": "553", "dtype": "float32"}, {"name": "554", "dtype": "float32"}, {"name": "555", "dtype": "float32"}, {"name": "556", "dtype": "float32"}, {"name": "557", "dtype": "float32"}, {"name": "558", "dtype": "float32"}, {"name": "559", "dtype": "float32"}, {"name": "560", "dtype": "float32"}, {"name": "561", "dtype": "float32"}, {"name": "562", "dtype": "float32"}, {"name": "563", "dtype": "float32"}, {"name": "564", "dtype": "float32"}, {"name": "565", "dtype": "float32"}, {"name": "566", "dtype": "float32"}, {"name": "567", "dtype": "float32"}, {"name": "568", "dtype": "float32"}, {"name": "569", "dtype": "float32"}, {"name": "570", "dtype": "float32"}, {"name": "571", "dtype": "float32"}, {"name": "572", "dtype": "float32"}, {"name": "573", "dtype": "float32"}, {"name": 
"574", "dtype": "float32"}, {"name": "575", "dtype": "float32"}, {"name": "576", "dtype": "float32"}, {"name": "577", "dtype": "float32"}, {"name": "578", "dtype": "float32"}, {"name": "579", "dtype": "float32"}, {"name": "580", "dtype": "float32"}, {"name": "581", "dtype": "float32"}, {"name": "582", "dtype": "float32"}, {"name": "583", "dtype": "float32"}, {"name": "584", "dtype": "float32"}, {"name": "585", "dtype": "float32"}, {"name": "586", "dtype": "float32"}, {"name": "587", "dtype": "float32"}, {"name": "588", "dtype": "float32"}, {"name": "589", "dtype": "float32"}, {"name": "590", "dtype": "float32"}, {"name": "591", "dtype": "float32"}, {"name": "592", "dtype": "float32"}, {"name": "593", "dtype": "float32"}, {"name": "594", "dtype": "float32"}, {"name": "595", "dtype": "float32"}, {"name": "596", "dtype": "float32"}, {"name": "597", "dtype": "float32"}, {"name": "598", "dtype": "float32"}, {"name": "599", "dtype": "float32"}, {"name": "600", "dtype": "float32"}, {"name": "601", "dtype": "float32"}, {"name": "602", "dtype": "float32"}, {"name": "603", "dtype": "float32"}, {"name": "604", "dtype": "float32"}, {"name": "605", "dtype": "float32"}, {"name": "606", "dtype": "float32"}, {"name": "607", "dtype": "float32"}, {"name": "608", "dtype": "float32"}, {"name": "609", "dtype": "float32"}, {"name": "610", "dtype": "float32"}, {"name": "611", "dtype": "float32"}, {"name": "612", "dtype": "float32"}, {"name": "613", "dtype": "float32"}, {"name": "614", "dtype": "float32"}, {"name": "615", "dtype": "float32"}, {"name": "616", "dtype": "float32"}, {"name": "617", "dtype": "float32"}, {"name": "618", "dtype": "float32"}, {"name": "619", "dtype": "float32"}, {"name": "620", "dtype": "float32"}, {"name": "621", "dtype": "float32"}, {"name": "622", "dtype": "float32"}, {"name": "623", "dtype": "float32"}, {"name": "624", "dtype": "float32"}, {"name": "625", "dtype": "float32"}, {"name": "626", "dtype": "float32"}, {"name": "627", "dtype": "float32"}, {"name": "628", "dtype": "float32"}, {"name": "629", "dtype": "float32"}, {"name": "630", "dtype": "float32"}, {"name": "631", "dtype": "float32"}, {"name": "632", "dtype": "float32"}, {"name": "633", "dtype": "float32"}, {"name": "634", "dtype": "float32"}, {"name": "635", "dtype": "float32"}, {"name": "636", "dtype": "float32"}, {"name": "637", "dtype": "float32"}, {"name": "638", "dtype": "float32"}, {"name": "639", "dtype": "float32"}, {"name": "640", "dtype": "float32"}, {"name": "641", "dtype": "float32"}, {"name": "642", "dtype": "float32"}, {"name": "643", "dtype": "float32"}, {"name": "644", "dtype": "float32"}, {"name": "645", "dtype": "float32"}, {"name": "646", "dtype": "float32"}, {"name": "647", "dtype": "float32"}, {"name": "648", "dtype": "float32"}, {"name": "649", "dtype": "float32"}, {"name": "650", "dtype": "float32"}, {"name": "651", "dtype": "float32"}, {"name": "652", "dtype": "float32"}, {"name": "653", "dtype": "float32"}, {"name": "654", "dtype": "float32"}, {"name": "655", "dtype": "float32"}, {"name": "656", "dtype": "float32"}, {"name": "657", "dtype": "float32"}, {"name": "658", "dtype": "float32"}, {"name": "659", "dtype": "float32"}, {"name": "660", "dtype": "float32"}, {"name": "661", "dtype": "float32"}, {"name": "662", "dtype": "float32"}, {"name": "663", "dtype": "float32"}, {"name": "664", "dtype": "float32"}, {"name": "665", "dtype": "float32"}, {"name": "666", "dtype": "float32"}, {"name": "667", "dtype": "float32"}, {"name": "668", "dtype": "float32"}, {"name": "669", "dtype": "float32"}, {"name": 
"670", "dtype": "float32"}, {"name": "671", "dtype": "float32"}, {"name": "672", "dtype": "float32"}, {"name": "673", "dtype": "float32"}, {"name": "674", "dtype": "float32"}, {"name": "675", "dtype": "float32"}, {"name": "676", "dtype": "float32"}, {"name": "677", "dtype": "float32"}, {"name": "678", "dtype": "float32"}, {"name": "679", "dtype": "float32"}, {"name": "680", "dtype": "float32"}, {"name": "681", "dtype": "float32"}, {"name": "682", "dtype": "float32"}, {"name": "683", "dtype": "float32"}, {"name": "684", "dtype": "float32"}, {"name": "685", "dtype": "float32"}, {"name": "686", "dtype": "float32"}, {"name": "687", "dtype": "float32"}, {"name": "688", "dtype": "float32"}, {"name": "689", "dtype": "float32"}, {"name": "690", "dtype": "float32"}, {"name": "691", "dtype": "float32"}, {"name": "692", "dtype": "float32"}, {"name": "693", "dtype": "float32"}, {"name": "694", "dtype": "float32"}, {"name": "695", "dtype": "float32"}, {"name": "696", "dtype": "float32"}, {"name": "697", "dtype": "float32"}, {"name": "698", "dtype": "float32"}, {"name": "699", "dtype": "float32"}, {"name": "700", "dtype": "float32"}, {"name": "701", "dtype": "float32"}, {"name": "702", "dtype": "float32"}, {"name": "703", "dtype": "float32"}, {"name": "704", "dtype": "float32"}, {"name": "705", "dtype": "float32"}, {"name": "706", "dtype": "float32"}, {"name": "707", "dtype": "float32"}, {"name": "708", "dtype": "float32"}, {"name": "709", "dtype": "float32"}, {"name": "710", "dtype": "float32"}, {"name": "711", "dtype": "float32"}, {"name": "712", "dtype": "float32"}, {"name": "713", "dtype": "float32"}, {"name": "714", "dtype": "float32"}, {"name": "715", "dtype": "float32"}, {"name": "716", "dtype": "float32"}, {"name": "717", "dtype": "float32"}, {"name": "718", "dtype": "float32"}, {"name": "719", "dtype": "float32"}, {"name": "720", "dtype": "float32"}, {"name": "721", "dtype": "float32"}, {"name": "722", "dtype": "float32"}, {"name": "723", "dtype": "float32"}, {"name": "724", "dtype": "float32"}, {"name": "725", "dtype": "float32"}, {"name": "726", "dtype": "float32"}, {"name": "727", "dtype": "float32"}, {"name": "728", "dtype": "float32"}, {"name": "729", "dtype": "float32"}, {"name": "730", "dtype": "float32"}, {"name": "731", "dtype": "float32"}, {"name": "732", "dtype": "float32"}, {"name": "733", "dtype": "float32"}, {"name": "734", "dtype": "float32"}, {"name": "735", "dtype": "float32"}, {"name": "736", "dtype": "float32"}, {"name": "737", "dtype": "float32"}, {"name": "738", "dtype": "float32"}, {"name": "739", "dtype": "float32"}, {"name": "740", "dtype": "float32"}, {"name": "741", "dtype": "float32"}, {"name": "742", "dtype": "float32"}, {"name": "743", "dtype": "float32"}, {"name": "744", "dtype": "float32"}, {"name": "745", "dtype": "float32"}, {"name": "746", "dtype": "float32"}, {"name": "747", "dtype": "float32"}, {"name": "748", "dtype": "float32"}, {"name": "749", "dtype": "float32"}, {"name": "750", "dtype": "float32"}, {"name": "751", "dtype": "float32"}, {"name": "752", "dtype": "float32"}, {"name": "753", "dtype": "float32"}, {"name": "754", "dtype": "float32"}, {"name": "755", "dtype": "float32"}, {"name": "756", "dtype": "float32"}, {"name": "757", "dtype": "float32"}, {"name": "758", "dtype": "float32"}, {"name": "759", "dtype": "float32"}, {"name": "760", "dtype": "float32"}, {"name": "761", "dtype": "float32"}, {"name": "762", "dtype": "float32"}, {"name": "763", "dtype": "float32"}, {"name": "764", "dtype": "float32"}, {"name": "765", "dtype": "float32"}, {"name": 
"766", "dtype": "float32"}, {"name": "767", "dtype": "float32"}, {"name": "768", "dtype": "float32"}, {"name": "769", "dtype": "float32"}, {"name": "770", "dtype": "float32"}, {"name": "771", "dtype": "float32"}, {"name": "772", "dtype": "float32"}, {"name": "773", "dtype": "float32"}, {"name": "774", "dtype": "float32"}, {"name": "775", "dtype": "float32"}, {"name": "776", "dtype": "float32"}, {"name": "777", "dtype": "float32"}, {"name": "778", "dtype": "float32"}, {"name": "779", "dtype": "float32"}, {"name": "780", "dtype": "float32"}, {"name": "781", "dtype": "float32"}, {"name": "782", "dtype": "float32"}, {"name": "783", "dtype": "float32"}, {"name": "784", "dtype": "float32"}, {"name": "785", "dtype": "float32"}, {"name": "786", "dtype": "float32"}, {"name": "787", "dtype": "float32"}, {"name": "788", "dtype": "float32"}, {"name": "789", "dtype": "float32"}, {"name": "790", "dtype": "float32"}, {"name": "791", "dtype": "float32"}, {"name": "792", "dtype": "float32"}, {"name": "793", "dtype": "float32"}, {"name": "794", "dtype": "float32"}, {"name": "795", "dtype": "float32"}, {"name": "796", "dtype": "float32"}, {"name": "797", "dtype": "float32"}, {"name": "798", "dtype": "float32"}, {"name": "799", "dtype": "float32"}, {"name": "800", "dtype": "float32"}, {"name": "801", "dtype": "float32"}, {"name": "802", "dtype": "float32"}, {"name": "803", "dtype": "float32"}, {"name": "804", "dtype": "float32"}, {"name": "805", "dtype": "float32"}, {"name": "806", "dtype": "float32"}, {"name": "807", "dtype": "float32"}, {"name": "808", "dtype": "float32"}, {"name": "809", "dtype": "float32"}, {"name": "810", "dtype": "float32"}, {"name": "811", "dtype": "float32"}, {"name": "812", "dtype": "float32"}, {"name": "813", "dtype": "float32"}, {"name": "814", "dtype": "float32"}, {"name": "815", "dtype": "float32"}, {"name": "816", "dtype": "float32"}, {"name": "817", "dtype": "float32"}, {"name": "818", "dtype": "float32"}, {"name": "819", "dtype": "float32"}, {"name": "820", "dtype": "float32"}, {"name": "821", "dtype": "float32"}, {"name": "822", "dtype": "float32"}, {"name": "823", "dtype": "float32"}, {"name": "824", "dtype": "float32"}, {"name": "825", "dtype": "float32"}, {"name": "826", "dtype": "float32"}, {"name": "827", "dtype": "float32"}, {"name": "828", "dtype": "float32"}, {"name": "829", "dtype": "float32"}, {"name": "830", "dtype": "float32"}, {"name": "831", "dtype": "float32"}, {"name": "832", "dtype": "float32"}, {"name": "833", "dtype": "float32"}, {"name": "834", "dtype": "float32"}, {"name": "835", "dtype": "float32"}, {"name": "836", "dtype": "float32"}, {"name": "837", "dtype": "float32"}, {"name": "838", "dtype": "float32"}, {"name": "839", "dtype": "float32"}, {"name": "840", "dtype": "float32"}, {"name": "841", "dtype": "float32"}, {"name": "842", "dtype": "float32"}, {"name": "843", "dtype": "float32"}, {"name": "844", "dtype": "float32"}, {"name": "845", "dtype": "float32"}, {"name": "846", "dtype": "float32"}, {"name": "847", "dtype": "float32"}, {"name": "848", "dtype": "float32"}, {"name": "849", "dtype": "float32"}, {"name": "850", "dtype": "float32"}, {"name": "851", "dtype": "float32"}, {"name": "852", "dtype": "float32"}, {"name": "853", "dtype": "float32"}, {"name": "854", "dtype": "float32"}, {"name": "855", "dtype": "float32"}, {"name": "856", "dtype": "float32"}, {"name": "857", "dtype": "float32"}, {"name": "858", "dtype": "float32"}, {"name": "859", "dtype": "float32"}, {"name": "860", "dtype": "float32"}, {"name": "861", "dtype": "float32"}, {"name": 
"862", "dtype": "float32"}, {"name": "863", "dtype": "float32"}, {"name": "864", "dtype": "float32"}, {"name": "865", "dtype": "float32"}, {"name": "866", "dtype": "float32"}, {"name": "867", "dtype": "float32"}, {"name": "868", "dtype": "float32"}, {"name": "869", "dtype": "float32"}, {"name": "870", "dtype": "float32"}, {"name": "871", "dtype": "float32"}, {"name": "872", "dtype": "float32"}, {"name": "873", "dtype": "float32"}, {"name": "874", "dtype": "float32"}, {"name": "875", "dtype": "float32"}, {"name": "876", "dtype": "float32"}, {"name": "877", "dtype": "float32"}, {"name": "878", "dtype": "float32"}, {"name": "879", "dtype": "float32"}, {"name": "880", "dtype": "float32"}, {"name": "881", "dtype": "float32"}, {"name": "882", "dtype": "float32"}, {"name": "883", "dtype": "float32"}, {"name": "884", "dtype": "float32"}, {"name": "885", "dtype": "float32"}, {"name": "886", "dtype": "float32"}, {"name": "887", "dtype": "float32"}, {"name": "888", "dtype": "float32"}, {"name": "889", "dtype": "float32"}, {"name": "890", "dtype": "float32"}, {"name": "891", "dtype": "float32"}, {"name": "892", "dtype": "float32"}, {"name": "893", "dtype": "float32"}, {"name": "894", "dtype": "float32"}, {"name": "895", "dtype": "float32"}, {"name": "896", "dtype": "float32"}, {"name": "897", "dtype": "float32"}, {"name": "898", "dtype": "float32"}, {"name": "899", "dtype": "float32"}, {"name": "900", "dtype": "float32"}, {"name": "901", "dtype": "float32"}, {"name": "902", "dtype": "float32"}, {"name": "903", "dtype": "float32"}, {"name": "904", "dtype": "float32"}, {"name": "905", "dtype": "float32"}, {"name": "906", "dtype": "float32"}, {"name": "907", "dtype": "float32"}, {"name": "908", "dtype": "float32"}, {"name": "909", "dtype": "float32"}, {"name": "910", "dtype": "float32"}, {"name": "911", "dtype": "float32"}, {"name": "912", "dtype": "float32"}, {"name": "913", "dtype": "float32"}, {"name": "914", "dtype": "float32"}, {"name": "915", "dtype": "float32"}, {"name": "916", "dtype": "float32"}, {"name": "917", "dtype": "float32"}, {"name": "918", "dtype": "float32"}, {"name": "919", "dtype": "float32"}, {"name": "920", "dtype": "float32"}, {"name": "921", "dtype": "float32"}, {"name": "922", "dtype": "float32"}, {"name": "923", "dtype": "float32"}, {"name": "924", "dtype": "float32"}, {"name": "925", "dtype": "float32"}, {"name": "926", "dtype": "float32"}, {"name": "927", "dtype": "float32"}, {"name": "928", "dtype": "float32"}, {"name": "929", "dtype": "float32"}, {"name": "930", "dtype": "float32"}, {"name": "931", "dtype": "float32"}, {"name": "932", "dtype": "float32"}, {"name": "933", "dtype": "float32"}, {"name": "934", "dtype": "float32"}, {"name": "935", "dtype": "float32"}, {"name": "936", "dtype": "float32"}, {"name": "937", "dtype": "float32"}, {"name": "938", "dtype": "float32"}, {"name": "939", "dtype": "float32"}, {"name": "940", "dtype": "float32"}, {"name": "941", "dtype": "float32"}, {"name": "942", "dtype": "float32"}, {"name": "943", "dtype": "float32"}, {"name": "944", "dtype": "float32"}, {"name": "945", "dtype": "float32"}, {"name": "946", "dtype": "float32"}, {"name": "947", "dtype": "float32"}, {"name": "948", "dtype": "float32"}, {"name": "949", "dtype": "float32"}, {"name": "950", "dtype": "float32"}, {"name": "951", "dtype": "float32"}, {"name": "952", "dtype": "float32"}, {"name": "953", "dtype": "float32"}, {"name": "954", "dtype": "float32"}, {"name": "955", "dtype": "float32"}, {"name": "956", "dtype": "float32"}, {"name": "957", "dtype": "float32"}, {"name": 
"958", "dtype": "float32"}, {"name": "959", "dtype": "float32"}, {"name": "960", "dtype": "float32"}, {"name": "961", "dtype": "float32"}, {"name": "962", "dtype": "float32"}, {"name": "963", "dtype": "float32"}, {"name": "964", "dtype": "float32"}, {"name": "965", "dtype": "float32"}, {"name": "966", "dtype": "float32"}, {"name": "967", "dtype": "float32"}, {"name": "968", "dtype": "float32"}, {"name": "969", "dtype": "float32"}, {"name": "970", "dtype": "float32"}, {"name": "971", "dtype": "float32"}, {"name": "972", "dtype": "float32"}, {"name": "973", "dtype": "float32"}, {"name": "974", "dtype": "float32"}, {"name": "975", "dtype": "float32"}, {"name": "976", "dtype": "float32"}, {"name": "977", "dtype": "float32"}, {"name": "978", "dtype": "float32"}, {"name": "979", "dtype": "float32"}, {"name": "980", "dtype": "float32"}, {"name": "981", "dtype": "float32"}, {"name": "982", "dtype": "float32"}, {"name": "983", "dtype": "float32"}, {"name": "984", "dtype": "float32"}, {"name": "985", "dtype": "float32"}, {"name": "986", "dtype": "float32"}, {"name": "987", "dtype": "float32"}, {"name": "988", "dtype": "float32"}, {"name": "989", "dtype": "float32"}, {"name": "990", "dtype": "float32"}, {"name": "991", "dtype": "float32"}, {"name": "992", "dtype": "float32"}, {"name": "993", "dtype": "float32"}, {"name": "994", "dtype": "float32"}, {"name": "995", "dtype": "float32"}, {"name": "996", "dtype": "float32"}, {"name": "997", "dtype": "float32"}, {"name": "998", "dtype": "float32"}, {"name": "999", "dtype": "float32"}, {"name": "1000", "dtype": "float32"}, {"name": "1001", "dtype": "float32"}, {"name": "1002", "dtype": "float32"}, {"name": "1003", "dtype": "float32"}, {"name": "1004", "dtype": "float32"}, {"name": "1005", "dtype": "float32"}, {"name": "1006", "dtype": "float32"}, {"name": "1007", "dtype": "float32"}, {"name": "1008", "dtype": "float32"}, {"name": "1009", "dtype": "float32"}, {"name": "1010", "dtype": "float32"}, {"name": "1011", "dtype": "float32"}, {"name": "1012", "dtype": "float32"}, {"name": "1013", "dtype": "float32"}, {"name": "1014", "dtype": "float32"}, {"name": "1015", "dtype": "float32"}, {"name": "1016", "dtype": "float32"}, {"name": "1017", "dtype": "float32"}, {"name": "1018", "dtype": "float32"}, {"name": "1019", "dtype": "float32"}, {"name": "1020", "dtype": "float32"}, {"name": "1021", "dtype": "float32"}, {"name": "1022", "dtype": "float32"}, {"name": "1023", "dtype": "float32"}, {"name": "1024", "dtype": "float32"}, {"name": "1025", "dtype": "float32"}, {"name": "1026", "dtype": "float32"}, {"name": "1027", "dtype": "float32"}, {"name": "1028", "dtype": "float32"}, {"name": "1029", "dtype": "float32"}, {"name": "1030", "dtype": "float32"}, {"name": "1031", "dtype": "float32"}, {"name": "1032", "dtype": "float32"}, {"name": "1033", "dtype": "float32"}, {"name": "1034", "dtype": "float32"}, {"name": "1035", "dtype": "float32"}, {"name": "1036", "dtype": "float32"}, {"name": "1037", "dtype": "float32"}, {"name": "1038", "dtype": "float32"}, {"name": "1039", "dtype": "float32"}, {"name": "1040", "dtype": "float32"}, {"name": "1041", "dtype": "float32"}, {"name": "1042", "dtype": "float32"}, {"name": "1043", "dtype": "float32"}, {"name": "1044", "dtype": "float32"}, {"name": "1045", "dtype": "float32"}, {"name": "1046", "dtype": "float32"}, {"name": "1047", "dtype": "float32"}, {"name": "1048", "dtype": "float32"}, {"name": "1049", "dtype": "float32"}, {"name": "1050", "dtype": "float32"}, {"name": "1051", "dtype": "float32"}, {"name": "1052", "dtype": 
"float32"}, {"name": "1053", "dtype": "float32"}, {"name": "1054", "dtype": "float32"}, {"name": "1055", "dtype": "float32"}, {"name": "1056", "dtype": "float32"}, {"name": "1057", "dtype": "float32"}, {"name": "1058", "dtype": "float32"}, {"name": "1059", "dtype": "float32"}, {"name": "1060", "dtype": "float32"}, {"name": "1061", "dtype": "float32"}, {"name": "1062", "dtype": "float32"}, {"name": "1063", "dtype": "float32"}, {"name": "1064", "dtype": "float32"}, {"name": "1065", "dtype": "float32"}, {"name": "1066", "dtype": "float32"}, {"name": "1067", "dtype": "float32"}, {"name": "1068", "dtype": "float32"}, {"name": "1069", "dtype": "float32"}, {"name": "1070", "dtype": "float32"}, {"name": "1071", "dtype": "float32"}, {"name": "1072", "dtype": "float32"}, {"name": "1073", "dtype": "float32"}, {"name": "1074", "dtype": "float32"}, {"name": "1075", "dtype": "float32"}, {"name": "1076", "dtype": "float32"}, {"name": "1077", "dtype": "float32"}, {"name": "1078", "dtype": "float32"}, {"name": "1079", "dtype": "float32"}, {"name": "1080", "dtype": "float32"}, {"name": "1081", "dtype": "float32"}, {"name": "1082", "dtype": "float32"}, {"name": "1083", "dtype": "float32"}, {"name": "1084", "dtype": "float32"}, {"name": "1085", "dtype": "float32"}, {"name": "1086", "dtype": "float32"}, {"name": "1087", "dtype": "float32"}, {"name": "1088", "dtype": "float32"}, {"name": "1089", "dtype": "float32"}, {"name": "1090", "dtype": "float32"}, {"name": "1091", "dtype": "float32"}, {"name": "1092", "dtype": "float32"}, {"name": "1093", "dtype": "float32"}, {"name": "1094", "dtype": "float32"}, {"name": "1095", "dtype": "float32"}, {"name": "1096", "dtype": "float32"}, {"name": "1097", "dtype": "float32"}, {"name": "1098", "dtype": "float32"}, {"name": "1099", "dtype": "float32"}, {"name": "1100", "dtype": "float32"}, {"name": "1101", "dtype": "float32"}, {"name": "1102", "dtype": "float32"}, {"name": "1103", "dtype": "float32"}, {"name": "1104", "dtype": "float32"}, {"name": "1105", "dtype": "float32"}, {"name": "1106", "dtype": "float32"}, {"name": "1107", "dtype": "float32"}, {"name": "1108", "dtype": "float32"}, {"name": "1109", "dtype": "float32"}, {"name": "1110", "dtype": "float32"}, {"name": "1111", "dtype": "float32"}, {"name": "1112", "dtype": "float32"}, {"name": "1113", "dtype": "float32"}, {"name": "1114", "dtype": "float32"}, {"name": "1115", "dtype": "float32"}, {"name": "1116", "dtype": "float32"}, {"name": "1117", "dtype": "float32"}, {"name": "1118", "dtype": "float32"}, {"name": "1119", "dtype": "float32"}, {"name": "1120", "dtype": "float32"}, {"name": "1121", "dtype": "float32"}, {"name": "1122", "dtype": "float32"}, {"name": "1123", "dtype": "float32"}, {"name": "1124", "dtype": "float32"}, {"name": "1125", "dtype": "float32"}, {"name": "1126", "dtype": "float32"}, {"name": "1127", "dtype": "float32"}, {"name": "1128", "dtype": "float32"}, {"name": "1129", "dtype": "float32"}, {"name": "1130", "dtype": "float32"}, {"name": "1131", "dtype": "float32"}, {"name": "1132", "dtype": "float32"}, {"name": "1133", "dtype": "float32"}, {"name": "1134", "dtype": "float32"}, {"name": "1135", "dtype": "float32"}, {"name": "1136", "dtype": "float32"}, {"name": "1137", "dtype": "float32"}, {"name": "1138", "dtype": "float32"}, {"name": "1139", "dtype": "float32"}, {"name": "1140", "dtype": "float32"}, {"name": "1141", "dtype": "float32"}, {"name": "1142", "dtype": "float32"}, {"name": "1143", "dtype": "float32"}, {"name": "1144", "dtype": "float32"}, {"name": "1145", "dtype": "float32"}, {"name": 
"1146", "dtype": "float32"}, {"name": "1147", "dtype": "float32"}, {"name": "1148", "dtype": "float32"}, {"name": "1149", "dtype": "float32"}, {"name": "1150", "dtype": "float32"}, {"name": "1151", "dtype": "float32"}, {"name": "1152", "dtype": "float32"}, {"name": "1153", "dtype": "float32"}, {"name": "1154", "dtype": "float32"}, {"name": "1155", "dtype": "float32"}, {"name": "1156", "dtype": "float32"}, {"name": "1157", "dtype": "float32"}, {"name": "1158", "dtype": "float32"}, {"name": "1159", "dtype": "float32"}, {"name": "1160", "dtype": "float32"}, {"name": "1161", "dtype": "float32"}, {"name": "1162", "dtype": "float32"}, {"name": "1163", "dtype": "float32"}, {"name": "1164", "dtype": "float32"}, {"name": "1165", "dtype": "float32"}, {"name": "1166", "dtype": "float32"}, {"name": "1167", "dtype": "float32"}, {"name": "1168", "dtype": "float32"}, {"name": "1169", "dtype": "float32"}, {"name": "1170", "dtype": "float32"}, {"name": "1171", "dtype": "float32"}, {"name": "1172", "dtype": "float32"}, {"name": "1173", "dtype": "float32"}, {"name": "1174", "dtype": "float32"}, {"name": "1175", "dtype": "float32"}, {"name": "1176", "dtype": "float32"}, {"name": "1177", "dtype": "float32"}, {"name": "1178", "dtype": "float32"}, {"name": "1179", "dtype": "float32"}, {"name": "1180", "dtype": "float32"}, {"name": "1181", "dtype": "float32"}, {"name": "1182", "dtype": "float32"}, {"name": "1183", "dtype": "float32"}, {"name": "1184", "dtype": "float32"}, {"name": "1185", "dtype": "float32"}, {"name": "1186", "dtype": "float32"}, {"name": "1187", "dtype": "float32"}, {"name": "1188", "dtype": "float32"}, {"name": "1189", "dtype": "float32"}, {"name": "1190", "dtype": "float32"}, {"name": "1191", "dtype": "float32"}, {"name": "1192", "dtype": "float32"}, {"name": "1193", "dtype": "float32"}, {"name": "1194", "dtype": "float32"}, {"name": "1195", "dtype": "float32"}, {"name": "1196", "dtype": "float32"}, {"name": "1197", "dtype": "float32"}, {"name": "1198", "dtype": "float32"}, {"name": "1199", "dtype": "float32"}, {"name": "1200", "dtype": "float32"}, {"name": "1201", "dtype": "float32"}, {"name": "1202", "dtype": "float32"}, {"name": "1203", "dtype": "float32"}, {"name": "1204", "dtype": "float32"}, {"name": "1205", "dtype": "float32"}, {"name": "1206", "dtype": "float32"}, {"name": "1207", "dtype": "float32"}, {"name": "1208", "dtype": "float32"}, {"name": "1209", "dtype": "float32"}, {"name": "1210", "dtype": "float32"}, {"name": "1211", "dtype": "float32"}, {"name": "1212", "dtype": "float32"}, {"name": "1213", "dtype": "float32"}, {"name": "1214", "dtype": "float32"}, {"name": "1215", "dtype": "float32"}, {"name": "1216", "dtype": "float32"}, {"name": "1217", "dtype": "float32"}, {"name": "1218", "dtype": "float32"}, {"name": "1219", "dtype": "float32"}, {"name": "1220", "dtype": "float32"}, {"name": "1221", "dtype": "float32"}, {"name": "1222", "dtype": "float32"}, {"name": "1223", "dtype": "float32"}, {"name": "1224", "dtype": "float32"}, {"name": "1225", "dtype": "float32"}, {"name": "1226", "dtype": "float32"}, {"name": "1227", "dtype": "float32"}, {"name": "1228", "dtype": "float32"}, {"name": "1229", "dtype": "float32"}, {"name": "1230", "dtype": "float32"}, {"name": "1231", "dtype": "float32"}, {"name": "1232", "dtype": "float32"}, {"name": "1233", "dtype": "float32"}, {"name": "1234", "dtype": "float32"}, {"name": "1235", "dtype": "float32"}, {"name": "1236", "dtype": "float32"}, {"name": "1237", "dtype": "float32"}, {"name": "1238", "dtype": "float32"}, {"name": "1239", "dtype": 
"float32"}, {"name": "1240", "dtype": "float32"}, {"name": "1241", "dtype": "float32"}, {"name": "1242", "dtype": "float32"}, {"name": "1243", "dtype": "float32"}, {"name": "1244", "dtype": "float32"}, {"name": "1245", "dtype": "float32"}, {"name": "1246", "dtype": "float32"}, {"name": "1247", "dtype": "float32"}, {"name": "1248", "dtype": "float32"}, {"name": "1249", "dtype": "float32"}, {"name": "1250", "dtype": "float32"}, {"name": "1251", "dtype": "float32"}, {"name": "1252", "dtype": "float32"}, {"name": "1253", "dtype": "float32"}, {"name": "1254", "dtype": "float32"}, {"name": "1255", "dtype": "float32"}, {"name": "1256", "dtype": "float32"}, {"name": "1257", "dtype": "float32"}, {"name": "1258", "dtype": "float32"}, {"name": "1259", "dtype": "float32"}, {"name": "1260", "dtype": "float32"}, {"name": "1261", "dtype": "float32"}, {"name": "1262", "dtype": "float32"}, {"name": "1263", "dtype": "float32"}, {"name": "1264", "dtype": "float32"}, {"name": "1265", "dtype": "float32"}, {"name": "1266", "dtype": "float32"}, {"name": "1267", "dtype": "float32"}, {"name": "1268", "dtype": "float32"}, {"name": "1269", "dtype": "float32"}, {"name": "1270", "dtype": "float32"}, {"name": "1271", "dtype": "float32"}, {"name": "1272", "dtype": "float32"}, {"name": "1273", "dtype": "float32"}, {"name": "1274", "dtype": "float32"}, {"name": "1275", "dtype": "float32"}, {"name": "1276", "dtype": "float32"}, {"name": "1277", "dtype": "float32"}, {"name": "1278", "dtype": "float32"}, {"name": "1279", "dtype": "float32"}, {"name": "1280", "dtype": "float32"}, {"name": "1281", "dtype": "float32"}, {"name": "1282", "dtype": "float32"}, {"name": "1283", "dtype": "float32"}, {"name": "1284", "dtype": "float32"}, {"name": "1285", "dtype": "float32"}, {"name": "1286", "dtype": "float32"}, {"name": "1287", "dtype": "float32"}, {"name": "1288", "dtype": "float32"}, {"name": "1289", "dtype": "float32"}, {"name": "1290", "dtype": "float32"}, {"name": "1291", "dtype": "float32"}, {"name": "1292", "dtype": "float32"}, {"name": "1293", "dtype": "float32"}, {"name": "1294", "dtype": "float32"}, {"name": "1295", "dtype": "float32"}, {"name": "1296", "dtype": "float32"}, {"name": "1297", "dtype": "float32"}, {"name": "1298", "dtype": "float32"}, {"name": "1299", "dtype": "float32"}, {"name": "1300", "dtype": "float32"}, {"name": "1301", "dtype": "float32"}, {"name": "1302", "dtype": "float32"}, {"name": "1303", "dtype": "float32"}, {"name": "1304", "dtype": "float32"}, {"name": "1305", "dtype": "float32"}, {"name": "1306", "dtype": "float32"}, {"name": "1307", "dtype": "float32"}, {"name": "1308", "dtype": "float32"}, {"name": "1309", "dtype": "float32"}, {"name": "1310", "dtype": "float32"}, {"name": "1311", "dtype": "float32"}, {"name": "1312", "dtype": "float32"}, {"name": "1313", "dtype": "float32"}, {"name": "1314", "dtype": "float32"}, {"name": "1315", "dtype": "float32"}, {"name": "1316", "dtype": "float32"}, {"name": "1317", "dtype": "float32"}, {"name": "1318", "dtype": "float32"}, {"name": "1319", "dtype": "float32"}, {"name": "1320", "dtype": "float32"}, {"name": "1321", "dtype": "float32"}, {"name": "1322", "dtype": "float32"}, {"name": "1323", "dtype": "float32"}, {"name": "1324", "dtype": "float32"}, {"name": "1325", "dtype": "float32"}, {"name": "1326", "dtype": "float32"}, {"name": "1327", "dtype": "float32"}, {"name": "1328", "dtype": "float32"}, {"name": "1329", "dtype": "float32"}, {"name": "1330", "dtype": "float32"}, {"name": "1331", "dtype": "float32"}, {"name": "1332", "dtype": "float32"}, {"name": 
"1333", "dtype": "float32"}, {"name": "1334", "dtype": "float32"}, {"name": "1335", "dtype": "float32"}, {"name": "1336", "dtype": "float32"}, {"name": "1337", "dtype": "float32"}, {"name": "1338", "dtype": "float32"}, {"name": "1339", "dtype": "float32"}, {"name": "1340", "dtype": "float32"}, {"name": "1341", "dtype": "float32"}, {"name": "1342", "dtype": "float32"}, {"name": "1343", "dtype": "float32"}, {"name": "1344", "dtype": "float32"}, {"name": "1345", "dtype": "float32"}, {"name": "1346", "dtype": "float32"}, {"name": "1347", "dtype": "float32"}, {"name": "1348", "dtype": "float32"}, {"name": "1349", "dtype": "float32"}, {"name": "1350", "dtype": "float32"}, {"name": "1351", "dtype": "float32"}, {"name": "1352", "dtype": "float32"}, {"name": "1353", "dtype": "float32"}, {"name": "1354", "dtype": "float32"}, {"name": "1355", "dtype": "float32"}, {"name": "1356", "dtype": "float32"}, {"name": "1357", "dtype": "float32"}, {"name": "1358", "dtype": "float32"}, {"name": "1359", "dtype": "float32"}, {"name": "1360", "dtype": "float32"}, {"name": "1361", "dtype": "float32"}, {"name": "1362", "dtype": "float32"}, {"name": "1363", "dtype": "float32"}, {"name": "1364", "dtype": "float32"}, {"name": "1365", "dtype": "float32"}, {"name": "1366", "dtype": "float32"}, {"name": "1367", "dtype": "float32"}, {"name": "1368", "dtype": "float32"}, {"name": "1369", "dtype": "float32"}, {"name": "1370", "dtype": "float32"}, {"name": "1371", "dtype": "float32"}, {"name": "1372", "dtype": "float32"}, {"name": "1373", "dtype": "float32"}, {"name": "1374", "dtype": "float32"}, {"name": "1375", "dtype": "float32"}, {"name": "1376", "dtype": "float32"}, {"name": "1377", "dtype": "float32"}, {"name": "1378", "dtype": "float32"}, {"name": "1379", "dtype": "float32"}, {"name": "1380", "dtype": "float32"}, {"name": "1381", "dtype": "float32"}, {"name": "1382", "dtype": "float32"}, {"name": "1383", "dtype": "float32"}, {"name": "1384", "dtype": "float32"}, {"name": "1385", "dtype": "float32"}, {"name": "1386", "dtype": "float32"}, {"name": "1387", "dtype": "float32"}, {"name": "1388", "dtype": "float32"}, {"name": "1389", "dtype": "float32"}, {"name": "1390", "dtype": "float32"}, {"name": "1391", "dtype": "float32"}, {"name": "1392", "dtype": "float32"}, {"name": "1393", "dtype": "float32"}, {"name": "1394", "dtype": "float32"}, {"name": "1395", "dtype": "float32"}, {"name": "1396", "dtype": "float32"}, {"name": "1397", "dtype": "float32"}, {"name": "1398", "dtype": "float32"}, {"name": "1399", "dtype": "float32"}, {"name": "1400", "dtype": "float32"}, {"name": "1401", "dtype": "float32"}, {"name": "1402", "dtype": "float32"}, {"name": "1403", "dtype": "float32"}, {"name": "1404", "dtype": "float32"}, {"name": "1405", "dtype": "float32"}, {"name": "1406", "dtype": "float32"}, {"name": "1407", "dtype": "float32"}, {"name": "1408", "dtype": "float32"}, {"name": "1409", "dtype": "float32"}, {"name": "1410", "dtype": "float32"}, {"name": "1411", "dtype": "float32"}, {"name": "1412", "dtype": "float32"}, {"name": "1413", "dtype": "float32"}, {"name": "1414", "dtype": "float32"}, {"name": "1415", "dtype": "float32"}, {"name": "1416", "dtype": "float32"}, {"name": "1417", "dtype": "float32"}, {"name": "1418", "dtype": "float32"}, {"name": "1419", "dtype": "float32"}, {"name": "1420", "dtype": "float32"}, {"name": "1421", "dtype": "float32"}, {"name": "1422", "dtype": "float32"}, {"name": "1423", "dtype": "float32"}, {"name": "1424", "dtype": "float32"}, {"name": "1425", "dtype": "float32"}, {"name": "1426", "dtype": 
"float32"}, {"name": "1427", "dtype": "float32"}, {"name": "1428", "dtype": "float32"}, {"name": "1429", "dtype": "float32"}, {"name": "1430", "dtype": "float32"}, {"name": "1431", "dtype": "float32"}, {"name": "1432", "dtype": "float32"}, {"name": "1433", "dtype": "float32"}, {"name": "1434", "dtype": "float32"}, {"name": "1435", "dtype": "float32"}, {"name": "1436", "dtype": "float32"}, {"name": "1437", "dtype": "float32"}, {"name": "1438", "dtype": "float32"}, {"name": "1439", "dtype": "float32"}, {"name": "1440", "dtype": "float32"}, {"name": "1441", "dtype": "float32"}, {"name": "1442", "dtype": "float32"}, {"name": "1443", "dtype": "float32"}, {"name": "1444", "dtype": "float32"}, {"name": "1445", "dtype": "float32"}, {"name": "1446", "dtype": "float32"}, {"name": "1447", "dtype": "float32"}, {"name": "1448", "dtype": "float32"}, {"name": "1449", "dtype": "float32"}, {"name": "1450", "dtype": "float32"}, {"name": "1451", "dtype": "float32"}, {"name": "1452", "dtype": "float32"}, {"name": "1453", "dtype": "float32"}, {"name": "1454", "dtype": "float32"}, {"name": "1455", "dtype": "float32"}, {"name": "1456", "dtype": "float32"}, {"name": "1457", "dtype": "float32"}, {"name": "1458", "dtype": "float32"}, {"name": "1459", "dtype": "float32"}, {"name": "1460", "dtype": "float32"}, {"name": "1461", "dtype": "float32"}, {"name": "1462", "dtype": "float32"}, {"name": "1463", "dtype": "float32"}, {"name": "1464", "dtype": "float32"}, {"name": "1465", "dtype": "float32"}, {"name": "1466", "dtype": "float32"}, {"name": "1467", "dtype": "float32"}, {"name": "1468", "dtype": "float32"}, {"name": "1469", "dtype": "float32"}, {"name": "1470", "dtype": "float32"}, {"name": "1471", "dtype": "float32"}, {"name": "1472", "dtype": "float32"}, {"name": "1473", "dtype": "float32"}, {"name": "1474", "dtype": "float32"}, {"name": "1475", "dtype": "float32"}, {"name": "1476", "dtype": "float32"}, {"name": "1477", "dtype": "float32"}, {"name": "1478", "dtype": "float32"}, {"name": "1479", "dtype": "float32"}, {"name": "1480", "dtype": "float32"}, {"name": "1481", "dtype": "float32"}, {"name": "1482", "dtype": "float32"}, {"name": "1483", "dtype": "float32"}, {"name": "1484", "dtype": "float32"}, {"name": "1485", "dtype": "float32"}, {"name": "1486", "dtype": "float32"}, {"name": "1487", "dtype": "float32"}, {"name": "1488", "dtype": "float32"}, {"name": "1489", "dtype": "float32"}, {"name": "1490", "dtype": "float32"}, {"name": "1491", "dtype": "float32"}, {"name": "1492", "dtype": "float32"}, {"name": "1493", "dtype": "float32"}, {"name": "1494", "dtype": "float32"}, {"name": "1495", "dtype": "float32"}, {"name": "1496", "dtype": "float32"}, {"name": "1497", "dtype": "float32"}, {"name": "1498", "dtype": "float32"}, {"name": "1499", "dtype": "float32"}, {"name": "1500", "dtype": "float32"}, {"name": "1501", "dtype": "float32"}, {"name": "1502", "dtype": "float32"}, {"name": "1503", "dtype": "float32"}, {"name": "1504", "dtype": "float32"}, {"name": "1505", "dtype": "float32"}, {"name": "1506", "dtype": "float32"}, {"name": "1507", "dtype": "float32"}, {"name": "1508", "dtype": "float32"}, {"name": "1509", "dtype": "float32"}, {"name": "1510", "dtype": "float32"}, {"name": "1511", "dtype": "float32"}, {"name": "1512", "dtype": "float32"}, {"name": "1513", "dtype": "float32"}, {"name": "1514", "dtype": "float32"}, {"name": "1515", "dtype": "float32"}, {"name": "1516", "dtype": "float32"}, {"name": "1517", "dtype": "float32"}, {"name": "1518", "dtype": "float32"}, {"name": "1519", "dtype": "float32"}, {"name": 
"1520", "dtype": "float32"}, {"name": "1521", "dtype": "float32"}, {"name": "1522", "dtype": "float32"}, {"name": "1523", "dtype": "float32"}, {"name": "1524", "dtype": "float32"}, {"name": "1525", "dtype": "float32"}, {"name": "1526", "dtype": "float32"}, {"name": "1527", "dtype": "float32"}, {"name": "1528", "dtype": "float32"}, {"name": "1529", "dtype": "float32"}, {"name": "1530", "dtype": "float32"}, {"name": "1531", "dtype": "float32"}, {"name": "1532", "dtype": "float32"}, {"name": "1533", "dtype": "float32"}, {"name": "1534", "dtype": "float32"}, {"name": "1535", "dtype": "float32"}, {"name": "1536", "dtype": "float32"}, {"name": "1537", "dtype": "float32"}, {"name": "1538", "dtype": "float32"}, {"name": "1539", "dtype": "float32"}, {"name": "1540", "dtype": "float32"}, {"name": "1541", "dtype": "float32"}, {"name": "1542", "dtype": "float32"}, {"name": "1543", "dtype": "float32"}, {"name": "1544", "dtype": "float32"}, {"name": "1545", "dtype": "float32"}, {"name": "1546", "dtype": "float32"}, {"name": "1547", "dtype": "float32"}, {"name": "1548", "dtype": "float32"}, {"name": "1549", "dtype": "float32"}, {"name": "1550", "dtype": "float32"}, {"name": "1551", "dtype": "float32"}, {"name": "1552", "dtype": "float32"}, {"name": "1553", "dtype": "float32"}, {"name": "1554", "dtype": "float32"}, {"name": "1555", "dtype": "float32"}, {"name": "1556", "dtype": "float32"}, {"name": "1557", "dtype": "float32"}, {"name": "1558", "dtype": "float32"}, {"name": "1559", "dtype": "float32"}, {"name": "1560", "dtype": "float32"}, {"name": "1561", "dtype": "float32"}, {"name": "1562", "dtype": "float32"}, {"name": "1563", "dtype": "float32"}, {"name": "1564", "dtype": "float32"}, {"name": "1565", "dtype": "float32"}, {"name": "1566", "dtype": "float32"}, {"name": "1567", "dtype": "float32"}, {"name": "1568", "dtype": "float32"}, {"name": "1569", "dtype": "float32"}, {"name": "1570", "dtype": "float32"}, {"name": "1571", "dtype": "float32"}, {"name": "1572", "dtype": "float32"}, {"name": "1573", "dtype": "float32"}, {"name": "1574", "dtype": "float32"}, {"name": "1575", "dtype": "float32"}, {"name": "1576", "dtype": "float32"}, {"name": "1577", "dtype": "float32"}, {"name": "1578", "dtype": "float32"}, {"name": "1579", "dtype": "float32"}, {"name": "1580", "dtype": "float32"}, {"name": "1581", "dtype": "float32"}, {"name": "1582", "dtype": "float32"}, {"name": "1583", "dtype": "float32"}, {"name": "1584", "dtype": "float32"}, {"name": "1585", "dtype": "float32"}, {"name": "1586", "dtype": "float32"}, {"name": "1587", "dtype": "float32"}, {"name": "1588", "dtype": "float32"}, {"name": "1589", "dtype": "float32"}, {"name": "1590", "dtype": "float32"}, {"name": "1591", "dtype": "float32"}, {"name": "1592", "dtype": "float32"}, {"name": "1593", "dtype": "float32"}, {"name": "1594", "dtype": "float32"}, {"name": "1595", "dtype": "float32"}, {"name": "1596", "dtype": "float32"}, {"name": "1597", "dtype": "float32"}, {"name": "1598", "dtype": "float32"}, {"name": "1599", "dtype": "float32"}, {"name": "1600", "dtype": "float32"}, {"name": "1601", "dtype": "float32"}, {"name": "1602", "dtype": "float32"}, {"name": "1603", "dtype": "float32"}, {"name": "1604", "dtype": "float32"}, {"name": "1605", "dtype": "float32"}, {"name": "1606", "dtype": "float32"}, {"name": "1607", "dtype": "float32"}, {"name": "1608", "dtype": "float32"}, {"name": "1609", "dtype": "float32"}, {"name": "1610", "dtype": "float32"}, {"name": "1611", "dtype": "float32"}, {"name": "1612", "dtype": "float32"}, {"name": "1613", "dtype": 
"float32"}, {"name": "1614", "dtype": "float32"}, {"name": "1615", "dtype": "float32"}, {"name": "1616", "dtype": "float32"}, {"name": "1617", "dtype": "float32"}, {"name": "1618", "dtype": "float32"}, {"name": "1619", "dtype": "float32"}, {"name": "1620", "dtype": "float32"}, {"name": "1621", "dtype": "float32"}, {"name": "1622", "dtype": "float32"}, {"name": "1623", "dtype": "float32"}, {"name": "1624", "dtype": "float32"}, {"name": "1625", "dtype": "float32"}, {"name": "1626", "dtype": "float32"}, {"name": "1627", "dtype": "float32"}, {"name": "1628", "dtype": "float32"}, {"name": "1629", "dtype": "float32"}, {"name": "1630", "dtype": "float32"}, {"name": "1631", "dtype": "float32"}, {"name": "1632", "dtype": "float32"}, {"name": "1633", "dtype": "float32"}, {"name": "1634", "dtype": "float32"}, {"name": "1635", "dtype": "float32"}, {"name": "1636", "dtype": "float32"}, {"name": "1637", "dtype": "float32"}, {"name": "1638", "dtype": "float32"}, {"name": "1639", "dtype": "float32"}, {"name": "1640", "dtype": "float32"}, {"name": "1641", "dtype": "float32"}, {"name": "1642", "dtype": "float32"}, {"name": "1643", "dtype": "float32"}, {"name": "1644", "dtype": "float32"}, {"name": "1645", "dtype": "float32"}, {"name": "1646", "dtype": "float32"}, {"name": "1647", "dtype": "float32"}, {"name": "1648", "dtype": "float32"}, {"name": "1649", "dtype": "float32"}, {"name": "1650", "dtype": "float32"}, {"name": "1651", "dtype": "float32"}, {"name": "1652", "dtype": "float32"}, {"name": "1653", "dtype": "float32"}, {"name": "1654", "dtype": "float32"}, {"name": "1655", "dtype": "float32"}, {"name": "1656", "dtype": "float32"}, {"name": "1657", "dtype": "float32"}, {"name": "1658", "dtype": "float32"}, {"name": "1659", "dtype": "float32"}, {"name": "1660", "dtype": "float32"}, {"name": "1661", "dtype": "float32"}, {"name": "1662", "dtype": "float32"}, {"name": "1663", "dtype": "float32"}, {"name": "1664", "dtype": "float32"}, {"name": "1665", "dtype": "float32"}, {"name": "1666", "dtype": "float32"}, {"name": "1667", "dtype": "float32"}, {"name": "1668", "dtype": "float32"}, {"name": "1669", "dtype": "float32"}, {"name": "1670", "dtype": "float32"}, {"name": "1671", "dtype": "float32"}, {"name": "1672", "dtype": "float32"}, {"name": "1673", "dtype": "float32"}, {"name": "1674", "dtype": "float32"}, {"name": "1675", "dtype": "float32"}, {"name": "1676", "dtype": "float32"}, {"name": "1677", "dtype": "float32"}, {"name": "1678", "dtype": "float32"}, {"name": "1679", "dtype": "float32"}, {"name": "1680", "dtype": "float32"}, {"name": "1681", "dtype": "float32"}, {"name": "1682", "dtype": "float32"}, {"name": "1683", "dtype": "float32"}, {"name": "1684", "dtype": "float32"}, {"name": "1685", "dtype": "float32"}, {"name": "1686", "dtype": "float32"}, {"name": "1687", "dtype": "float32"}, {"name": "1688", "dtype": "float32"}, {"name": "1689", "dtype": "float32"}, {"name": "1690", "dtype": "float32"}, {"name": "1691", "dtype": "float32"}, {"name": "1692", "dtype": "float32"}, {"name": "1693", "dtype": "float32"}, {"name": "1694", "dtype": "float32"}, {"name": "1695", "dtype": "float32"}, {"name": "1696", "dtype": "float32"}, {"name": "1697", "dtype": "float32"}, {"name": "1698", "dtype": "float32"}, {"name": "1699", "dtype": "float32"}, {"name": "1700", "dtype": "float32"}, {"name": "1701", "dtype": "float32"}, {"name": "1702", "dtype": "float32"}, {"name": "1703", "dtype": "float32"}, {"name": "1704", "dtype": "float32"}, {"name": "1705", "dtype": "float32"}, {"name": "1706", "dtype": "float32"}, {"name": 
"1707", "dtype": "float32"}, {"name": "1708", "dtype": "float32"}, {"name": "1709", "dtype": "float32"}, {"name": "1710", "dtype": "float32"}, {"name": "1711", "dtype": "float32"}, {"name": "1712", "dtype": "float32"}, {"name": "1713", "dtype": "float32"}, {"name": "1714", "dtype": "float32"}, {"name": "1715", "dtype": "float32"}, {"name": "1716", "dtype": "float32"}, {"name": "1717", "dtype": "float32"}, {"name": "1718", "dtype": "float32"}, {"name": "1719", "dtype": "float32"}, {"name": "1720", "dtype": "float32"}, {"name": "1721", "dtype": "float32"}, {"name": "1722", "dtype": "float32"}, {"name": "1723", "dtype": "float32"}, {"name": "1724", "dtype": "float32"}, {"name": "1725", "dtype": "float32"}, {"name": "1726", "dtype": "float32"}, {"name": "1727", "dtype": "float32"}, {"name": "1728", "dtype": "float32"}, {"name": "1729", "dtype": "float32"}, {"name": "1730", "dtype": "float32"}, {"name": "1731", "dtype": "float32"}, {"name": "1732", "dtype": "float32"}, {"name": "1733", "dtype": "float32"}, {"name": "1734", "dtype": "float32"}, {"name": "1735", "dtype": "float32"}, {"name": "1736", "dtype": "float32"}, {"name": "1737", "dtype": "float32"}, {"name": "1738", "dtype": "float32"}, {"name": "1739", "dtype": "float32"}, {"name": "1740", "dtype": "float32"}, {"name": "1741", "dtype": "float32"}, {"name": "1742", "dtype": "float32"}, {"name": "1743", "dtype": "float32"}, {"name": "1744", "dtype": "float32"}, {"name": "1745", "dtype": "float32"}, {"name": "1746", "dtype": "float32"}, {"name": "1747", "dtype": "float32"}, {"name": "1748", "dtype": "float32"}, {"name": "1749", "dtype": "float32"}, {"name": "1750", "dtype": "float32"}, {"name": "1751", "dtype": "float32"}, {"name": "1752", "dtype": "float32"}, {"name": "1753", "dtype": "float32"}, {"name": "1754", "dtype": "float32"}, {"name": "1755", "dtype": "float32"}, {"name": "1756", "dtype": "float32"}, {"name": "1757", "dtype": "float32"}, {"name": "1758", "dtype": "float32"}, {"name": "1759", "dtype": "float32"}, {"name": "1760", "dtype": "float32"}, {"name": "1761", "dtype": "float32"}, {"name": "1762", "dtype": "float32"}, {"name": "1763", "dtype": "float32"}, {"name": "1764", "dtype": "float32"}, {"name": "1765", "dtype": "float32"}, {"name": "1766", "dtype": "float32"}, {"name": "1767", "dtype": "float32"}, {"name": "1768", "dtype": "float32"}, {"name": "1769", "dtype": "float32"}, {"name": "1770", "dtype": "float32"}, {"name": "1771", "dtype": "float32"}, {"name": "1772", "dtype": "float32"}, {"name": "1773", "dtype": "float32"}, {"name": "1774", "dtype": "float32"}, {"name": "1775", "dtype": "float32"}, {"name": "1776", "dtype": "float32"}, {"name": "1777", "dtype": "float32"}, {"name": "1778", "dtype": "float32"}, {"name": "1779", "dtype": "float32"}, {"name": "1780", "dtype": "float32"}, {"name": "1781", "dtype": "float32"}, {"name": "1782", "dtype": "float32"}, {"name": "1783", "dtype": "float32"}, {"name": "1784", "dtype": "float32"}, {"name": "1785", "dtype": "float32"}, {"name": "1786", "dtype": "float32"}, {"name": "1787", "dtype": "float32"}, {"name": "1788", "dtype": "float32"}, {"name": "1789", "dtype": "float32"}, {"name": "1790", "dtype": "float32"}, {"name": "1791", "dtype": "float32"}, {"name": "1792", "dtype": "float32"}, {"name": "1793", "dtype": "float32"}, {"name": "1794", "dtype": "float32"}, {"name": "1795", "dtype": "float32"}, {"name": "1796", "dtype": "float32"}, {"name": "1797", "dtype": "float32"}, {"name": "1798", "dtype": "float32"}, {"name": "1799", "dtype": "float32"}, {"name": "1800", "dtype": 
"float32"}, {"name": "1801", "dtype": "float32"}, {"name": "1802", "dtype": "float32"}, {"name": "1803", "dtype": "float32"}, {"name": "1804", "dtype": "float32"}, {"name": "1805", "dtype": "float32"}, {"name": "1806", "dtype": "float32"}, {"name": "1807", "dtype": "float32"}, {"name": "1808", "dtype": "float32"}, {"name": "1809", "dtype": "float32"}, {"name": "1810", "dtype": "float32"}, {"name": "1811", "dtype": "float32"}, {"name": "1812", "dtype": "float32"}, {"name": "1813", "dtype": "float32"}, {"name": "1814", "dtype": "float32"}, {"name": "1815", "dtype": "float32"}, {"name": "1816", "dtype": "float32"}, {"name": "1817", "dtype": "float32"}, {"name": "1818", "dtype": "float32"}, {"name": "1819", "dtype": "float32"}, {"name": "1820", "dtype": "float32"}, {"name": "1821", "dtype": "float32"}, {"name": "1822", "dtype": "float32"}, {"name": "1823", "dtype": "float32"}, {"name": "1824", "dtype": "float32"}, {"name": "1825", "dtype": "float32"}, {"name": "1826", "dtype": "float32"}, {"name": "1827", "dtype": "float32"}, {"name": "1828", "dtype": "float32"}, {"name": "1829", "dtype": "float32"}, {"name": "1830", "dtype": "float32"}, {"name": "1831", "dtype": "float32"}, {"name": "1832", "dtype": "float32"}, {"name": "1833", "dtype": "float32"}, {"name": "1834", "dtype": "float32"}, {"name": "1835", "dtype": "float32"}, {"name": "1836", "dtype": "float32"}, {"name": "1837", "dtype": "float32"}, {"name": "1838", "dtype": "float32"}, {"name": "1839", "dtype": "float32"}, {"name": "1840", "dtype": "float32"}, {"name": "1841", "dtype": "float32"}, {"name": "1842", "dtype": "float32"}, {"name": "1843", "dtype": "float32"}, {"name": "1844", "dtype": "float32"}, {"name": "1845", "dtype": "float32"}, {"name": "1846", "dtype": "float32"}, {"name": "1847", "dtype": "float32"}, {"name": "1848", "dtype": "float32"}, {"name": "1849", "dtype": "float32"}, {"name": "1850", "dtype": "float32"}, {"name": "1851", "dtype": "float32"}, {"name": "1852", "dtype": "float32"}, {"name": "1853", "dtype": "float32"}, {"name": "1854", "dtype": "float32"}, {"name": "1855", "dtype": "float32"}, {"name": "1856", "dtype": "float32"}, {"name": "1857", "dtype": "float32"}, {"name": "1858", "dtype": "float32"}, {"name": "1859", "dtype": "float32"}, {"name": "1860", "dtype": "float32"}, {"name": "1861", "dtype": "float32"}, {"name": "1862", "dtype": "float32"}, {"name": "1863", "dtype": "float32"}, {"name": "1864", "dtype": "float32"}, {"name": "1865", "dtype": "float32"}, {"name": "1866", "dtype": "float32"}, {"name": "1867", "dtype": "float32"}, {"name": "1868", "dtype": "float32"}, {"name": "1869", "dtype": "float32"}, {"name": "1870", "dtype": "float32"}, {"name": "1871", "dtype": "float32"}, {"name": "1872", "dtype": "float32"}, {"name": "1873", "dtype": "float32"}, {"name": "1874", "dtype": "float32"}, {"name": "1875", "dtype": "float32"}, {"name": "1876", "dtype": "float32"}, {"name": "1877", "dtype": "float32"}, {"name": "1878", "dtype": "float32"}, {"name": "1879", "dtype": "float32"}, {"name": "1880", "dtype": "float32"}, {"name": "1881", "dtype": "float32"}, {"name": "1882", "dtype": "float32"}, {"name": "1883", "dtype": "float32"}, {"name": "1884", "dtype": "float32"}, {"name": "1885", "dtype": "float32"}, {"name": "1886", "dtype": "float32"}, {"name": "1887", "dtype": "float32"}, {"name": "1888", "dtype": "float32"}, {"name": "1889", "dtype": "float32"}, {"name": "1890", "dtype": "float32"}, {"name": "1891", "dtype": "float32"}, {"name": "1892", "dtype": "float32"}, {"name": "1893", "dtype": "float32"}, {"name": 
"1894", "dtype": "float32"}, {"name": "1895", "dtype": "float32"}, {"name": "1896", "dtype": "float32"}, {"name": "1897", "dtype": "float32"}, {"name": "1898", "dtype": "float32"}, {"name": "1899", "dtype": "float32"}, {"name": "1900", "dtype": "float32"}, {"name": "1901", "dtype": "float32"}, {"name": "1902", "dtype": "float32"}, {"name": "1903", "dtype": "float32"}, {"name": "1904", "dtype": "float32"}, {"name": "1905", "dtype": "float32"}, {"name": "1906", "dtype": "float32"}, {"name": "1907", "dtype": "float32"}, {"name": "1908", "dtype": "float32"}, {"name": "1909", "dtype": "float32"}, {"name": "1910", "dtype": "float32"}, {"name": "1911", "dtype": "float32"}, {"name": "1912", "dtype": "float32"}, {"name": "1913", "dtype": "float32"}, {"name": "1914", "dtype": "float32"}, {"name": "1915", "dtype": "float32"}, {"name": "1916", "dtype": "float32"}, {"name": "1917", "dtype": "float32"}, {"name": "1918", "dtype": "float32"}, {"name": "1919", "dtype": "float32"}, {"name": "1920", "dtype": "float32"}, {"name": "1921", "dtype": "float32"}, {"name": "1922", "dtype": "float32"}, {"name": "1923", "dtype": "float32"}, {"name": "1924", "dtype": "float32"}, {"name": "1925", "dtype": "float32"}, {"name": "1926", "dtype": "float32"}, {"name": "1927", "dtype": "float32"}, {"name": "1928", "dtype": "float32"}, {"name": "1929", "dtype": "float32"}, {"name": "1930", "dtype": "float32"}, {"name": "1931", "dtype": "float32"}, {"name": "1932", "dtype": "float32"}, {"name": "1933", "dtype": "float32"}, {"name": "1934", "dtype": "float32"}, {"name": "1935", "dtype": "float32"}, {"name": "1936", "dtype": "float32"}, {"name": "1937", "dtype": "float32"}, {"name": "1938", "dtype": "float32"}, {"name": "1939", "dtype": "float32"}, {"name": "1940", "dtype": "float32"}, {"name": "1941", "dtype": "float32"}, {"name": "1942", "dtype": "float32"}, {"name": "1943", "dtype": "float32"}, {"name": "1944", "dtype": "float32"}, {"name": "1945", "dtype": "float32"}, {"name": "1946", "dtype": "float32"}, {"name": "1947", "dtype": "float32"}, {"name": "1948", "dtype": "float32"}, {"name": "1949", "dtype": "float32"}, {"name": "1950", "dtype": "float32"}, {"name": "1951", "dtype": "float32"}, {"name": "1952", "dtype": "float32"}, {"name": "1953", "dtype": "float32"}, {"name": "1954", "dtype": "float32"}, {"name": "1955", "dtype": "float32"}, {"name": "1956", "dtype": "float32"}, {"name": "1957", "dtype": "float32"}, {"name": "1958", "dtype": "float32"}, {"name": "1959", "dtype": "float32"}, {"name": "1960", "dtype": "float32"}, {"name": "1961", "dtype": "float32"}, {"name": "1962", "dtype": "float32"}, {"name": "1963", "dtype": "float32"}, {"name": "1964", "dtype": "float32"}, {"name": "1965", "dtype": "float32"}, {"name": "1966", "dtype": "float32"}, {"name": "1967", "dtype": "float32"}, {"name": "1968", "dtype": "float32"}, {"name": "1969", "dtype": "float32"}, {"name": "1970", "dtype": "float32"}, {"name": "1971", "dtype": "float32"}, {"name": "1972", "dtype": "float32"}, {"name": "1973", "dtype": "float32"}, {"name": "1974", "dtype": "float32"}, {"name": "1975", "dtype": "float32"}, {"name": "1976", "dtype": "float32"}, {"name": "1977", "dtype": "float32"}, {"name": "1978", "dtype": "float32"}, {"name": "1979", "dtype": "float32"}, {"name": "1980", "dtype": "float32"}, {"name": "1981", "dtype": "float32"}, {"name": "1982", "dtype": "float32"}, {"name": "1983", "dtype": "float32"}, {"name": "1984", "dtype": "float32"}, {"name": "1985", "dtype": "float32"}, {"name": "1986", "dtype": "float32"}, {"name": "1987", "dtype": 
"float32"}, {"name": "1988", "dtype": "float32"}, {"name": "1989", "dtype": "float32"}, {"name": "1990", "dtype": "float32"}, {"name": "1991", "dtype": "float32"}, {"name": "1992", "dtype": "float32"}, {"name": "1993", "dtype": "float32"}, {"name": "1994", "dtype": "float32"}, {"name": "1995", "dtype": "float32"}, {"name": "1996", "dtype": "float32"}, {"name": "1997", "dtype": "float32"}, {"name": "1998", "dtype": "float32"}, {"name": "1999", "dtype": "float32"}, {"name": "2000", "dtype": "float32"}, {"name": "2001", "dtype": "float32"}, {"name": "2002", "dtype": "float32"}, {"name": "2003", "dtype": "float32"}, {"name": "2004", "dtype": "float32"}, {"name": "2005", "dtype": "float32"}, {"name": "2006", "dtype": "float32"}, {"name": "2007", "dtype": "float32"}, {"name": "2008", "dtype": "float32"}, {"name": "2009", "dtype": "float32"}, {"name": "2010", "dtype": "float32"}, {"name": "2011", "dtype": "float32"}, {"name": "2012", "dtype": "float32"}, {"name": "2013", "dtype": "float32"}, {"name": "2014", "dtype": "float32"}, {"name": "2015", "dtype": "float32"}, {"name": "2016", "dtype": "float32"}, {"name": "2017", "dtype": "float32"}, {"name": "2018", "dtype": "float32"}, {"name": "2019", "dtype": "float32"}, {"name": "2020", "dtype": "float32"}, {"name": "2021", "dtype": "float32"}, {"name": "2022", "dtype": "float32"}, {"name": "2023", "dtype": "float32"}, {"name": "2024", "dtype": "float32"}, {"name": "2025", "dtype": "float32"}, {"name": "2026", "dtype": "float32"}, {"name": "2027", "dtype": "float32"}, {"name": "2028", "dtype": "float32"}, {"name": "2029", "dtype": "float32"}, {"name": "2030", "dtype": "float32"}, {"name": "2031", "dtype": "float32"}, {"name": "2032", "dtype": "float32"}, {"name": "2033", "dtype": "float32"}, {"name": "2034", "dtype": "float32"}, {"name": "2035", "dtype": "float32"}, {"name": "2036", "dtype": "float32"}, {"name": "2037", "dtype": "float32"}, {"name": "2038", "dtype": "float32"}, {"name": "2039", "dtype": "float32"}, {"name": "2040", "dtype": "float32"}, {"name": "2041", "dtype": "float32"}, {"name": "2042", "dtype": "float32"}, {"name": "2043", "dtype": "float32"}, {"name": "2044", "dtype": "float32"}, {"name": "2045", "dtype": "float32"}, {"name": "2046", "dtype": "float32"}, {"name": "2047", "dtype": "float32"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 307582709.0625, "num_examples": 37500}, {"name": "test", "num_bytes": 102527570.0, "num_examples": 12500}], "download_size": 565388038, "dataset_size": 410110279.0625}}
2023-08-18T15:00:07+00:00
[]
[]
TAGS #region-us
# Dataset Card for "BGL_GPTNEO_Baseline" More Information needed
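Since the card itself is still a stub, here is a minimal, unofficial loading sketch. It assumes the repo id `EgilKarlsen/BGL_GPTNEO_Baseline` (inferred from the card title) and only the schema declared in the metadata above: 2048 `float32` feature columns named `"0"`..`"2047"` plus a string `label` column, with `train`/`test` splits.

```python
# Minimal sketch, not an official loader.
# Assumptions: repo id EgilKarlsen/BGL_GPTNEO_Baseline (inferred from the
# card title); columns "0".."2047" (float32) and "label" (string), as in
# the metadata above.
import numpy as np
from datasets import load_dataset

ds = load_dataset("EgilKarlsen/BGL_GPTNEO_Baseline")  # splits: train, test

feature_cols = [str(i) for i in range(2048)]  # the numbered embedding dimensions
train_df = ds["train"].to_pandas()

X_train = train_df[feature_cols].to_numpy(dtype=np.float32)  # (37500, 2048) matrix
y_train = train_df["label"].to_numpy()                       # string labels
print(X_train.shape, np.unique(y_train))
```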
[ "# Dataset Card for \"BGL_GPTNEO_Baseline\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"BGL_GPTNEO_Baseline\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"BGL_GPTNEO_Baseline\"\n\nMore Information needed" ]
bff21b7a88ed0298637db29ba67a8aa58b451f67
# Dataset Card for "melanoma" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
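As the card is otherwise empty, a minimal, hedged inspection sketch follows; it assumes only the schema declared in the dataset metadata (an `image` column and a five-class `label`).

```python
# Minimal sketch for inspecting the melanoma dataset.
# Assumes only the declared schema: an "image" column (PIL image) and a
# 5-way ClassLabel "label".
from collections import Counter
from datasets import load_dataset

ds = load_dataset("ThankGod/melanoma")

names = ds["train"].features["label"].names   # class-index -> class-name mapping
counts = Counter(ds["train"]["label"])        # class distribution in the train split
for idx in sorted(counts):
    print(f"{names[idx]}: {counts[idx]} images")

sample = ds["train"][0]
img = sample["image"].convert("RGB").resize((224, 224))  # ready for a vision model
print(img.size, names[sample["label"]])
```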
ThankGod/melanoma
[ "region:us" ]
2023-08-18T14:49:20+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "Actinic keratosis", "1": "Benign keratosis", "2": "Dermatofibroma", "3": "Melanocytic nevus", "4": "Vascular lesion"}}}}], "splits": [{"name": "train", "num_bytes": 5807355933.47, "num_examples": 14318}, {"name": "validation", "num_bytes": 406410771.682, "num_examples": 1262}, {"name": "test", "num_bytes": 393276175.928, "num_examples": 1278}], "download_size": 5993086085, "dataset_size": 6607042881.08}}
2023-08-18T14:52:54+00:00
[]
[]
TAGS #region-us
# Dataset Card for "melanoma" More Information needed
[ "# Dataset Card for \"melanoma\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"melanoma\"\n\nMore Information needed" ]
[ 6, 12 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"melanoma\"\n\nMore Information needed" ]
35477b62514168378879ae5edfe24089d055f3b2
# Dataset of yorigami_shion/依神紫苑/요리가미시온 (Touhou)

This is the dataset of yorigami_shion/依神紫苑/요리가미시온 (Touhou), containing 500 images and their tags.

The core tags of this character are `blue_hair, long_hair, bow, hair_bow, blue_eyes, blue_bow, hair_between_eyes, very_long_hair, bangs`, which are pruned in this dataset.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by the [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)).

## List of Packages

| Name             | Images | Size       | Download                                                                                                                | Type       | Description                                                           |
|:-----------------|-------:|:-----------|:------------------------------------------------------------------------------------------------------------------------|:-----------|:----------------------------------------------------------------------|
| raw              | 500    | 735.41 MiB | [Download](https://huggingface.co/datasets/CyberHarem/yorigami_shion_touhou/resolve/main/dataset-raw.zip)               | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger).  |
| 800              | 500    | 407.46 MiB | [Download](https://huggingface.co/datasets/CyberHarem/yorigami_shion_touhou/resolve/main/dataset-800.zip)               | IMG+TXT    | dataset with the shorter side not exceeding 800 pixels.               |
| stage3-p480-800  | 1223   | 866.61 MiB | [Download](https://huggingface.co/datasets/CyberHarem/yorigami_shion_touhou/resolve/main/dataset-stage3-p480-800.zip)   | IMG+TXT    | 3-stage cropped dataset with the area not less than 480x480 pixels.   |
| 1200             | 500    | 646.68 MiB | [Download](https://huggingface.co/datasets/CyberHarem/yorigami_shion_touhou/resolve/main/dataset-1200.zip)              | IMG+TXT    | dataset with the shorter side not exceeding 1200 pixels.              |
| stage3-p480-1200 | 1223   | 1.24 GiB   | [Download](https://huggingface.co/datasets/CyberHarem/yorigami_shion_touhou/resolve/main/dataset-stage3-p480-1200.zip)  | IMG+TXT    | 3-stage cropped dataset with the area not less than 480x480 pixels.   |

### Load Raw Dataset with Waifuc

We provide the raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need it, just run the following code:

```python
import os
import zipfile

from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource

# download the raw archive file
zip_file = hf_hub_download(
    repo_id='CyberHarem/yorigami_shion_touhou',
    repo_type='dataset',
    filename='dataset-raw.zip',
)

# extract the files to your directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)

# load the dataset with waifuc
source = LocalSource(dataset_dir)
for item in source:
    print(item.image, item.meta['filename'], item.meta['tags'])
```

## List of Clusters

List of tag clustering results; some outfits may be mined here.
### Raw Text Version

| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags |
|----:|----------:|:------|:------|:------|:------|:------|:-----|
| 0 | 11 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, bangle, blue_skirt, debt, grey_hoodie, looking_at_viewer, short_sleeves, solo, simple_background, closed_mouth, drawstring, cowboy_shot, white_background, collarbone, miniskirt, standing, blush |
| 1 | 9 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, bangle, blue_skirt, debt, grey_hoodie, looking_at_viewer, short_sleeves, solo, stuffed_cat, white_background, simple_background, open_mouth, holding |
| 2 | 11 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, looking_at_viewer, solo, fake_animal_ears, playboy_bunny, rabbit_ears, strapless_leotard, alternate_costume, bare_shoulders, blush, simple_background, black_leotard, debt, pantyhose, white_background, wrist_cuffs, cleavage, detached_collar, rabbit_tail, blue_leotard, bowtie, small_breasts |

### Table Version

| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | bangle | blue_skirt | debt | grey_hoodie | looking_at_viewer | short_sleeves | solo | simple_background | closed_mouth | drawstring | cowboy_shot | white_background | collarbone | miniskirt | standing | blush | stuffed_cat | open_mouth | holding | fake_animal_ears | playboy_bunny | rabbit_ears | strapless_leotard | alternate_costume | bare_shoulders | black_leotard | pantyhose | wrist_cuffs | cleavage | detached_collar | rabbit_tail | blue_leotard | bowtie | small_breasts |
|----:|----------:|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|:------|
| 0 | 11 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X |  |  |  |  |  |  |  |  |  |  |  |  |  |  |  |  |  |
| 1 | 9 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | X | X | X | X | X | X |  |  |  | X |  |  |  |  | X | X | X |  |  |  |  |  |  |  |  |  |  |  |  |  |  |  |
| 2 | 11 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X |  |  | X |  | X |  | X | X |  |  |  | X |  |  |  | X |  |  |  | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X |
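The waifuc snippet above covers only the raw package. For the `IMG+TXT` packages in the table further up, a plain-Python sketch like the following should work; note that the layout assumed inside the archives (one `.txt` tag file per image, sharing the image's stem) is an assumption, not something documented on this card.

```python
# Hedged sketch for the IMG+TXT packages (e.g. dataset-800.zip).
# Assumption: each image in the archive is paired with a same-named .txt
# file containing its comma-separated tags.
import os
import zipfile

from huggingface_hub import hf_hub_download

zip_file = hf_hub_download(
    repo_id='CyberHarem/yorigami_shion_touhou',
    repo_type='dataset',
    filename='dataset-800.zip',  # the 800px IMG+TXT package from the table above
)

dataset_dir = 'dataset_800'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)

# pair every image with its tag file
for root, _, files in os.walk(dataset_dir):
    for fname in sorted(files):
        stem, ext = os.path.splitext(fname)
        if ext.lower() in ('.png', '.jpg', '.jpeg', '.webp'):
            tag_path = os.path.join(root, stem + '.txt')
            if os.path.exists(tag_path):
                with open(tag_path, encoding='utf-8') as f:
                    print(fname, '->', f.read().strip())
```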
CyberHarem/yorigami_shion_touhou
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-18T14:50:41+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-14T22:20:29+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of yorigami\_shion/依神紫苑/요리가미시온 (Touhou) =============================================== This is the dataset of yorigami\_shion/依神紫苑/요리가미시온 (Touhou), containing 500 images and their tags. The core tags of this character are 'blue\_hair, long\_hair, bow, hair\_bow, blue\_eyes, blue\_bow, hair\_between\_eyes, very\_long\_hair, bangs', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code List of Clusters ---------------- List of tag clustering result, maybe some outfits can be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ 44, 61, 5, 4 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.### Raw Text Version### Table Version" ]
6635d59d3e8f1922e72c27d79dda7af39641f5d7
# Dataset Card for "BGL_BERT_Finetuned" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
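As with the other BGL embedding sets, the card is a stub. Below is a minimal, hedged sketch of batched access via the `torch` formatter, assuming only the schema declared in the metadata: 768 `float32` columns (`"0"`..`"767"`) plus a string `label`.

```python
# Minimal sketch; assumes only the declared schema of 768 float32 columns
# ("0".."767") plus a string "label".
import torch
from datasets import load_dataset

ds = load_dataset("EgilKarlsen/BGL_BERT_Finetuned")

feature_cols = [str(i) for i in range(768)]
test = ds["test"].with_format("torch", columns=feature_cols)

batch = test[:32]                                         # dict of per-column tensors
X = torch.stack([batch[c] for c in feature_cols], dim=1)  # shape (32, 768)
print(X.shape)
```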
EgilKarlsen/BGL_BERT_Finetuned
[ "region:us" ]
2023-08-18T15:02:43+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "0", "dtype": "float32"}, {"name": "1", "dtype": "float32"}, {"name": "2", "dtype": "float32"}, {"name": "3", "dtype": "float32"}, {"name": "4", "dtype": "float32"}, {"name": "5", "dtype": "float32"}, {"name": "6", "dtype": "float32"}, {"name": "7", "dtype": "float32"}, {"name": "8", "dtype": "float32"}, {"name": "9", "dtype": "float32"}, {"name": "10", "dtype": "float32"}, {"name": "11", "dtype": "float32"}, {"name": "12", "dtype": "float32"}, {"name": "13", "dtype": "float32"}, {"name": "14", "dtype": "float32"}, {"name": "15", "dtype": "float32"}, {"name": "16", "dtype": "float32"}, {"name": "17", "dtype": "float32"}, {"name": "18", "dtype": "float32"}, {"name": "19", "dtype": "float32"}, {"name": "20", "dtype": "float32"}, {"name": "21", "dtype": "float32"}, {"name": "22", "dtype": "float32"}, {"name": "23", "dtype": "float32"}, {"name": "24", "dtype": "float32"}, {"name": "25", "dtype": "float32"}, {"name": "26", "dtype": "float32"}, {"name": "27", "dtype": "float32"}, {"name": "28", "dtype": "float32"}, {"name": "29", "dtype": "float32"}, {"name": "30", "dtype": "float32"}, {"name": "31", "dtype": "float32"}, {"name": "32", "dtype": "float32"}, {"name": "33", "dtype": "float32"}, {"name": "34", "dtype": "float32"}, {"name": "35", "dtype": "float32"}, {"name": "36", "dtype": "float32"}, {"name": "37", "dtype": "float32"}, {"name": "38", "dtype": "float32"}, {"name": "39", "dtype": "float32"}, {"name": "40", "dtype": "float32"}, {"name": "41", "dtype": "float32"}, {"name": "42", "dtype": "float32"}, {"name": "43", "dtype": "float32"}, {"name": "44", "dtype": "float32"}, {"name": "45", "dtype": "float32"}, {"name": "46", "dtype": "float32"}, {"name": "47", "dtype": "float32"}, {"name": "48", "dtype": "float32"}, {"name": "49", "dtype": "float32"}, {"name": "50", "dtype": "float32"}, {"name": "51", "dtype": "float32"}, {"name": "52", "dtype": "float32"}, {"name": "53", "dtype": "float32"}, {"name": "54", "dtype": "float32"}, {"name": "55", "dtype": "float32"}, {"name": "56", "dtype": "float32"}, {"name": "57", "dtype": "float32"}, {"name": "58", "dtype": "float32"}, {"name": "59", "dtype": "float32"}, {"name": "60", "dtype": "float32"}, {"name": "61", "dtype": "float32"}, {"name": "62", "dtype": "float32"}, {"name": "63", "dtype": "float32"}, {"name": "64", "dtype": "float32"}, {"name": "65", "dtype": "float32"}, {"name": "66", "dtype": "float32"}, {"name": "67", "dtype": "float32"}, {"name": "68", "dtype": "float32"}, {"name": "69", "dtype": "float32"}, {"name": "70", "dtype": "float32"}, {"name": "71", "dtype": "float32"}, {"name": "72", "dtype": "float32"}, {"name": "73", "dtype": "float32"}, {"name": "74", "dtype": "float32"}, {"name": "75", "dtype": "float32"}, {"name": "76", "dtype": "float32"}, {"name": "77", "dtype": "float32"}, {"name": "78", "dtype": "float32"}, {"name": "79", "dtype": "float32"}, {"name": "80", "dtype": "float32"}, {"name": "81", "dtype": "float32"}, {"name": "82", "dtype": "float32"}, {"name": "83", "dtype": "float32"}, {"name": "84", "dtype": "float32"}, {"name": "85", "dtype": "float32"}, {"name": "86", "dtype": "float32"}, {"name": "87", "dtype": "float32"}, {"name": "88", "dtype": "float32"}, {"name": "89", "dtype": "float32"}, {"name": "90", "dtype": "float32"}, {"name": "91", "dtype": "float32"}, {"name": "92", "dtype": "float32"}, {"name": "93", "dtype": "float32"}, 
{"name": "94", "dtype": "float32"}, {"name": "95", "dtype": "float32"}, {"name": "96", "dtype": "float32"}, {"name": "97", "dtype": "float32"}, {"name": "98", "dtype": "float32"}, {"name": "99", "dtype": "float32"}, {"name": "100", "dtype": "float32"}, {"name": "101", "dtype": "float32"}, {"name": "102", "dtype": "float32"}, {"name": "103", "dtype": "float32"}, {"name": "104", "dtype": "float32"}, {"name": "105", "dtype": "float32"}, {"name": "106", "dtype": "float32"}, {"name": "107", "dtype": "float32"}, {"name": "108", "dtype": "float32"}, {"name": "109", "dtype": "float32"}, {"name": "110", "dtype": "float32"}, {"name": "111", "dtype": "float32"}, {"name": "112", "dtype": "float32"}, {"name": "113", "dtype": "float32"}, {"name": "114", "dtype": "float32"}, {"name": "115", "dtype": "float32"}, {"name": "116", "dtype": "float32"}, {"name": "117", "dtype": "float32"}, {"name": "118", "dtype": "float32"}, {"name": "119", "dtype": "float32"}, {"name": "120", "dtype": "float32"}, {"name": "121", "dtype": "float32"}, {"name": "122", "dtype": "float32"}, {"name": "123", "dtype": "float32"}, {"name": "124", "dtype": "float32"}, {"name": "125", "dtype": "float32"}, {"name": "126", "dtype": "float32"}, {"name": "127", "dtype": "float32"}, {"name": "128", "dtype": "float32"}, {"name": "129", "dtype": "float32"}, {"name": "130", "dtype": "float32"}, {"name": "131", "dtype": "float32"}, {"name": "132", "dtype": "float32"}, {"name": "133", "dtype": "float32"}, {"name": "134", "dtype": "float32"}, {"name": "135", "dtype": "float32"}, {"name": "136", "dtype": "float32"}, {"name": "137", "dtype": "float32"}, {"name": "138", "dtype": "float32"}, {"name": "139", "dtype": "float32"}, {"name": "140", "dtype": "float32"}, {"name": "141", "dtype": "float32"}, {"name": "142", "dtype": "float32"}, {"name": "143", "dtype": "float32"}, {"name": "144", "dtype": "float32"}, {"name": "145", "dtype": "float32"}, {"name": "146", "dtype": "float32"}, {"name": "147", "dtype": "float32"}, {"name": "148", "dtype": "float32"}, {"name": "149", "dtype": "float32"}, {"name": "150", "dtype": "float32"}, {"name": "151", "dtype": "float32"}, {"name": "152", "dtype": "float32"}, {"name": "153", "dtype": "float32"}, {"name": "154", "dtype": "float32"}, {"name": "155", "dtype": "float32"}, {"name": "156", "dtype": "float32"}, {"name": "157", "dtype": "float32"}, {"name": "158", "dtype": "float32"}, {"name": "159", "dtype": "float32"}, {"name": "160", "dtype": "float32"}, {"name": "161", "dtype": "float32"}, {"name": "162", "dtype": "float32"}, {"name": "163", "dtype": "float32"}, {"name": "164", "dtype": "float32"}, {"name": "165", "dtype": "float32"}, {"name": "166", "dtype": "float32"}, {"name": "167", "dtype": "float32"}, {"name": "168", "dtype": "float32"}, {"name": "169", "dtype": "float32"}, {"name": "170", "dtype": "float32"}, {"name": "171", "dtype": "float32"}, {"name": "172", "dtype": "float32"}, {"name": "173", "dtype": "float32"}, {"name": "174", "dtype": "float32"}, {"name": "175", "dtype": "float32"}, {"name": "176", "dtype": "float32"}, {"name": "177", "dtype": "float32"}, {"name": "178", "dtype": "float32"}, {"name": "179", "dtype": "float32"}, {"name": "180", "dtype": "float32"}, {"name": "181", "dtype": "float32"}, {"name": "182", "dtype": "float32"}, {"name": "183", "dtype": "float32"}, {"name": "184", "dtype": "float32"}, {"name": "185", "dtype": "float32"}, {"name": "186", "dtype": "float32"}, {"name": "187", "dtype": "float32"}, {"name": "188", "dtype": "float32"}, {"name": "189", "dtype": "float32"}, {"name": 
"190", "dtype": "float32"}, {"name": "191", "dtype": "float32"}, {"name": "192", "dtype": "float32"}, {"name": "193", "dtype": "float32"}, {"name": "194", "dtype": "float32"}, {"name": "195", "dtype": "float32"}, {"name": "196", "dtype": "float32"}, {"name": "197", "dtype": "float32"}, {"name": "198", "dtype": "float32"}, {"name": "199", "dtype": "float32"}, {"name": "200", "dtype": "float32"}, {"name": "201", "dtype": "float32"}, {"name": "202", "dtype": "float32"}, {"name": "203", "dtype": "float32"}, {"name": "204", "dtype": "float32"}, {"name": "205", "dtype": "float32"}, {"name": "206", "dtype": "float32"}, {"name": "207", "dtype": "float32"}, {"name": "208", "dtype": "float32"}, {"name": "209", "dtype": "float32"}, {"name": "210", "dtype": "float32"}, {"name": "211", "dtype": "float32"}, {"name": "212", "dtype": "float32"}, {"name": "213", "dtype": "float32"}, {"name": "214", "dtype": "float32"}, {"name": "215", "dtype": "float32"}, {"name": "216", "dtype": "float32"}, {"name": "217", "dtype": "float32"}, {"name": "218", "dtype": "float32"}, {"name": "219", "dtype": "float32"}, {"name": "220", "dtype": "float32"}, {"name": "221", "dtype": "float32"}, {"name": "222", "dtype": "float32"}, {"name": "223", "dtype": "float32"}, {"name": "224", "dtype": "float32"}, {"name": "225", "dtype": "float32"}, {"name": "226", "dtype": "float32"}, {"name": "227", "dtype": "float32"}, {"name": "228", "dtype": "float32"}, {"name": "229", "dtype": "float32"}, {"name": "230", "dtype": "float32"}, {"name": "231", "dtype": "float32"}, {"name": "232", "dtype": "float32"}, {"name": "233", "dtype": "float32"}, {"name": "234", "dtype": "float32"}, {"name": "235", "dtype": "float32"}, {"name": "236", "dtype": "float32"}, {"name": "237", "dtype": "float32"}, {"name": "238", "dtype": "float32"}, {"name": "239", "dtype": "float32"}, {"name": "240", "dtype": "float32"}, {"name": "241", "dtype": "float32"}, {"name": "242", "dtype": "float32"}, {"name": "243", "dtype": "float32"}, {"name": "244", "dtype": "float32"}, {"name": "245", "dtype": "float32"}, {"name": "246", "dtype": "float32"}, {"name": "247", "dtype": "float32"}, {"name": "248", "dtype": "float32"}, {"name": "249", "dtype": "float32"}, {"name": "250", "dtype": "float32"}, {"name": "251", "dtype": "float32"}, {"name": "252", "dtype": "float32"}, {"name": "253", "dtype": "float32"}, {"name": "254", "dtype": "float32"}, {"name": "255", "dtype": "float32"}, {"name": "256", "dtype": "float32"}, {"name": "257", "dtype": "float32"}, {"name": "258", "dtype": "float32"}, {"name": "259", "dtype": "float32"}, {"name": "260", "dtype": "float32"}, {"name": "261", "dtype": "float32"}, {"name": "262", "dtype": "float32"}, {"name": "263", "dtype": "float32"}, {"name": "264", "dtype": "float32"}, {"name": "265", "dtype": "float32"}, {"name": "266", "dtype": "float32"}, {"name": "267", "dtype": "float32"}, {"name": "268", "dtype": "float32"}, {"name": "269", "dtype": "float32"}, {"name": "270", "dtype": "float32"}, {"name": "271", "dtype": "float32"}, {"name": "272", "dtype": "float32"}, {"name": "273", "dtype": "float32"}, {"name": "274", "dtype": "float32"}, {"name": "275", "dtype": "float32"}, {"name": "276", "dtype": "float32"}, {"name": "277", "dtype": "float32"}, {"name": "278", "dtype": "float32"}, {"name": "279", "dtype": "float32"}, {"name": "280", "dtype": "float32"}, {"name": "281", "dtype": "float32"}, {"name": "282", "dtype": "float32"}, {"name": "283", "dtype": "float32"}, {"name": "284", "dtype": "float32"}, {"name": "285", "dtype": "float32"}, {"name": 
"286", "dtype": "float32"}, {"name": "287", "dtype": "float32"}, {"name": "288", "dtype": "float32"}, {"name": "289", "dtype": "float32"}, {"name": "290", "dtype": "float32"}, {"name": "291", "dtype": "float32"}, {"name": "292", "dtype": "float32"}, {"name": "293", "dtype": "float32"}, {"name": "294", "dtype": "float32"}, {"name": "295", "dtype": "float32"}, {"name": "296", "dtype": "float32"}, {"name": "297", "dtype": "float32"}, {"name": "298", "dtype": "float32"}, {"name": "299", "dtype": "float32"}, {"name": "300", "dtype": "float32"}, {"name": "301", "dtype": "float32"}, {"name": "302", "dtype": "float32"}, {"name": "303", "dtype": "float32"}, {"name": "304", "dtype": "float32"}, {"name": "305", "dtype": "float32"}, {"name": "306", "dtype": "float32"}, {"name": "307", "dtype": "float32"}, {"name": "308", "dtype": "float32"}, {"name": "309", "dtype": "float32"}, {"name": "310", "dtype": "float32"}, {"name": "311", "dtype": "float32"}, {"name": "312", "dtype": "float32"}, {"name": "313", "dtype": "float32"}, {"name": "314", "dtype": "float32"}, {"name": "315", "dtype": "float32"}, {"name": "316", "dtype": "float32"}, {"name": "317", "dtype": "float32"}, {"name": "318", "dtype": "float32"}, {"name": "319", "dtype": "float32"}, {"name": "320", "dtype": "float32"}, {"name": "321", "dtype": "float32"}, {"name": "322", "dtype": "float32"}, {"name": "323", "dtype": "float32"}, {"name": "324", "dtype": "float32"}, {"name": "325", "dtype": "float32"}, {"name": "326", "dtype": "float32"}, {"name": "327", "dtype": "float32"}, {"name": "328", "dtype": "float32"}, {"name": "329", "dtype": "float32"}, {"name": "330", "dtype": "float32"}, {"name": "331", "dtype": "float32"}, {"name": "332", "dtype": "float32"}, {"name": "333", "dtype": "float32"}, {"name": "334", "dtype": "float32"}, {"name": "335", "dtype": "float32"}, {"name": "336", "dtype": "float32"}, {"name": "337", "dtype": "float32"}, {"name": "338", "dtype": "float32"}, {"name": "339", "dtype": "float32"}, {"name": "340", "dtype": "float32"}, {"name": "341", "dtype": "float32"}, {"name": "342", "dtype": "float32"}, {"name": "343", "dtype": "float32"}, {"name": "344", "dtype": "float32"}, {"name": "345", "dtype": "float32"}, {"name": "346", "dtype": "float32"}, {"name": "347", "dtype": "float32"}, {"name": "348", "dtype": "float32"}, {"name": "349", "dtype": "float32"}, {"name": "350", "dtype": "float32"}, {"name": "351", "dtype": "float32"}, {"name": "352", "dtype": "float32"}, {"name": "353", "dtype": "float32"}, {"name": "354", "dtype": "float32"}, {"name": "355", "dtype": "float32"}, {"name": "356", "dtype": "float32"}, {"name": "357", "dtype": "float32"}, {"name": "358", "dtype": "float32"}, {"name": "359", "dtype": "float32"}, {"name": "360", "dtype": "float32"}, {"name": "361", "dtype": "float32"}, {"name": "362", "dtype": "float32"}, {"name": "363", "dtype": "float32"}, {"name": "364", "dtype": "float32"}, {"name": "365", "dtype": "float32"}, {"name": "366", "dtype": "float32"}, {"name": "367", "dtype": "float32"}, {"name": "368", "dtype": "float32"}, {"name": "369", "dtype": "float32"}, {"name": "370", "dtype": "float32"}, {"name": "371", "dtype": "float32"}, {"name": "372", "dtype": "float32"}, {"name": "373", "dtype": "float32"}, {"name": "374", "dtype": "float32"}, {"name": "375", "dtype": "float32"}, {"name": "376", "dtype": "float32"}, {"name": "377", "dtype": "float32"}, {"name": "378", "dtype": "float32"}, {"name": "379", "dtype": "float32"}, {"name": "380", "dtype": "float32"}, {"name": "381", "dtype": "float32"}, {"name": 
"382", "dtype": "float32"}, {"name": "383", "dtype": "float32"}, {"name": "384", "dtype": "float32"}, {"name": "385", "dtype": "float32"}, {"name": "386", "dtype": "float32"}, {"name": "387", "dtype": "float32"}, {"name": "388", "dtype": "float32"}, {"name": "389", "dtype": "float32"}, {"name": "390", "dtype": "float32"}, {"name": "391", "dtype": "float32"}, {"name": "392", "dtype": "float32"}, {"name": "393", "dtype": "float32"}, {"name": "394", "dtype": "float32"}, {"name": "395", "dtype": "float32"}, {"name": "396", "dtype": "float32"}, {"name": "397", "dtype": "float32"}, {"name": "398", "dtype": "float32"}, {"name": "399", "dtype": "float32"}, {"name": "400", "dtype": "float32"}, {"name": "401", "dtype": "float32"}, {"name": "402", "dtype": "float32"}, {"name": "403", "dtype": "float32"}, {"name": "404", "dtype": "float32"}, {"name": "405", "dtype": "float32"}, {"name": "406", "dtype": "float32"}, {"name": "407", "dtype": "float32"}, {"name": "408", "dtype": "float32"}, {"name": "409", "dtype": "float32"}, {"name": "410", "dtype": "float32"}, {"name": "411", "dtype": "float32"}, {"name": "412", "dtype": "float32"}, {"name": "413", "dtype": "float32"}, {"name": "414", "dtype": "float32"}, {"name": "415", "dtype": "float32"}, {"name": "416", "dtype": "float32"}, {"name": "417", "dtype": "float32"}, {"name": "418", "dtype": "float32"}, {"name": "419", "dtype": "float32"}, {"name": "420", "dtype": "float32"}, {"name": "421", "dtype": "float32"}, {"name": "422", "dtype": "float32"}, {"name": "423", "dtype": "float32"}, {"name": "424", "dtype": "float32"}, {"name": "425", "dtype": "float32"}, {"name": "426", "dtype": "float32"}, {"name": "427", "dtype": "float32"}, {"name": "428", "dtype": "float32"}, {"name": "429", "dtype": "float32"}, {"name": "430", "dtype": "float32"}, {"name": "431", "dtype": "float32"}, {"name": "432", "dtype": "float32"}, {"name": "433", "dtype": "float32"}, {"name": "434", "dtype": "float32"}, {"name": "435", "dtype": "float32"}, {"name": "436", "dtype": "float32"}, {"name": "437", "dtype": "float32"}, {"name": "438", "dtype": "float32"}, {"name": "439", "dtype": "float32"}, {"name": "440", "dtype": "float32"}, {"name": "441", "dtype": "float32"}, {"name": "442", "dtype": "float32"}, {"name": "443", "dtype": "float32"}, {"name": "444", "dtype": "float32"}, {"name": "445", "dtype": "float32"}, {"name": "446", "dtype": "float32"}, {"name": "447", "dtype": "float32"}, {"name": "448", "dtype": "float32"}, {"name": "449", "dtype": "float32"}, {"name": "450", "dtype": "float32"}, {"name": "451", "dtype": "float32"}, {"name": "452", "dtype": "float32"}, {"name": "453", "dtype": "float32"}, {"name": "454", "dtype": "float32"}, {"name": "455", "dtype": "float32"}, {"name": "456", "dtype": "float32"}, {"name": "457", "dtype": "float32"}, {"name": "458", "dtype": "float32"}, {"name": "459", "dtype": "float32"}, {"name": "460", "dtype": "float32"}, {"name": "461", "dtype": "float32"}, {"name": "462", "dtype": "float32"}, {"name": "463", "dtype": "float32"}, {"name": "464", "dtype": "float32"}, {"name": "465", "dtype": "float32"}, {"name": "466", "dtype": "float32"}, {"name": "467", "dtype": "float32"}, {"name": "468", "dtype": "float32"}, {"name": "469", "dtype": "float32"}, {"name": "470", "dtype": "float32"}, {"name": "471", "dtype": "float32"}, {"name": "472", "dtype": "float32"}, {"name": "473", "dtype": "float32"}, {"name": "474", "dtype": "float32"}, {"name": "475", "dtype": "float32"}, {"name": "476", "dtype": "float32"}, {"name": "477", "dtype": "float32"}, {"name": 
"478", "dtype": "float32"}, {"name": "479", "dtype": "float32"}, {"name": "480", "dtype": "float32"}, {"name": "481", "dtype": "float32"}, {"name": "482", "dtype": "float32"}, {"name": "483", "dtype": "float32"}, {"name": "484", "dtype": "float32"}, {"name": "485", "dtype": "float32"}, {"name": "486", "dtype": "float32"}, {"name": "487", "dtype": "float32"}, {"name": "488", "dtype": "float32"}, {"name": "489", "dtype": "float32"}, {"name": "490", "dtype": "float32"}, {"name": "491", "dtype": "float32"}, {"name": "492", "dtype": "float32"}, {"name": "493", "dtype": "float32"}, {"name": "494", "dtype": "float32"}, {"name": "495", "dtype": "float32"}, {"name": "496", "dtype": "float32"}, {"name": "497", "dtype": "float32"}, {"name": "498", "dtype": "float32"}, {"name": "499", "dtype": "float32"}, {"name": "500", "dtype": "float32"}, {"name": "501", "dtype": "float32"}, {"name": "502", "dtype": "float32"}, {"name": "503", "dtype": "float32"}, {"name": "504", "dtype": "float32"}, {"name": "505", "dtype": "float32"}, {"name": "506", "dtype": "float32"}, {"name": "507", "dtype": "float32"}, {"name": "508", "dtype": "float32"}, {"name": "509", "dtype": "float32"}, {"name": "510", "dtype": "float32"}, {"name": "511", "dtype": "float32"}, {"name": "512", "dtype": "float32"}, {"name": "513", "dtype": "float32"}, {"name": "514", "dtype": "float32"}, {"name": "515", "dtype": "float32"}, {"name": "516", "dtype": "float32"}, {"name": "517", "dtype": "float32"}, {"name": "518", "dtype": "float32"}, {"name": "519", "dtype": "float32"}, {"name": "520", "dtype": "float32"}, {"name": "521", "dtype": "float32"}, {"name": "522", "dtype": "float32"}, {"name": "523", "dtype": "float32"}, {"name": "524", "dtype": "float32"}, {"name": "525", "dtype": "float32"}, {"name": "526", "dtype": "float32"}, {"name": "527", "dtype": "float32"}, {"name": "528", "dtype": "float32"}, {"name": "529", "dtype": "float32"}, {"name": "530", "dtype": "float32"}, {"name": "531", "dtype": "float32"}, {"name": "532", "dtype": "float32"}, {"name": "533", "dtype": "float32"}, {"name": "534", "dtype": "float32"}, {"name": "535", "dtype": "float32"}, {"name": "536", "dtype": "float32"}, {"name": "537", "dtype": "float32"}, {"name": "538", "dtype": "float32"}, {"name": "539", "dtype": "float32"}, {"name": "540", "dtype": "float32"}, {"name": "541", "dtype": "float32"}, {"name": "542", "dtype": "float32"}, {"name": "543", "dtype": "float32"}, {"name": "544", "dtype": "float32"}, {"name": "545", "dtype": "float32"}, {"name": "546", "dtype": "float32"}, {"name": "547", "dtype": "float32"}, {"name": "548", "dtype": "float32"}, {"name": "549", "dtype": "float32"}, {"name": "550", "dtype": "float32"}, {"name": "551", "dtype": "float32"}, {"name": "552", "dtype": "float32"}, {"name": "553", "dtype": "float32"}, {"name": "554", "dtype": "float32"}, {"name": "555", "dtype": "float32"}, {"name": "556", "dtype": "float32"}, {"name": "557", "dtype": "float32"}, {"name": "558", "dtype": "float32"}, {"name": "559", "dtype": "float32"}, {"name": "560", "dtype": "float32"}, {"name": "561", "dtype": "float32"}, {"name": "562", "dtype": "float32"}, {"name": "563", "dtype": "float32"}, {"name": "564", "dtype": "float32"}, {"name": "565", "dtype": "float32"}, {"name": "566", "dtype": "float32"}, {"name": "567", "dtype": "float32"}, {"name": "568", "dtype": "float32"}, {"name": "569", "dtype": "float32"}, {"name": "570", "dtype": "float32"}, {"name": "571", "dtype": "float32"}, {"name": "572", "dtype": "float32"}, {"name": "573", "dtype": "float32"}, {"name": 
"574", "dtype": "float32"}, {"name": "575", "dtype": "float32"}, {"name": "576", "dtype": "float32"}, {"name": "577", "dtype": "float32"}, {"name": "578", "dtype": "float32"}, {"name": "579", "dtype": "float32"}, {"name": "580", "dtype": "float32"}, {"name": "581", "dtype": "float32"}, {"name": "582", "dtype": "float32"}, {"name": "583", "dtype": "float32"}, {"name": "584", "dtype": "float32"}, {"name": "585", "dtype": "float32"}, {"name": "586", "dtype": "float32"}, {"name": "587", "dtype": "float32"}, {"name": "588", "dtype": "float32"}, {"name": "589", "dtype": "float32"}, {"name": "590", "dtype": "float32"}, {"name": "591", "dtype": "float32"}, {"name": "592", "dtype": "float32"}, {"name": "593", "dtype": "float32"}, {"name": "594", "dtype": "float32"}, {"name": "595", "dtype": "float32"}, {"name": "596", "dtype": "float32"}, {"name": "597", "dtype": "float32"}, {"name": "598", "dtype": "float32"}, {"name": "599", "dtype": "float32"}, {"name": "600", "dtype": "float32"}, {"name": "601", "dtype": "float32"}, {"name": "602", "dtype": "float32"}, {"name": "603", "dtype": "float32"}, {"name": "604", "dtype": "float32"}, {"name": "605", "dtype": "float32"}, {"name": "606", "dtype": "float32"}, {"name": "607", "dtype": "float32"}, {"name": "608", "dtype": "float32"}, {"name": "609", "dtype": "float32"}, {"name": "610", "dtype": "float32"}, {"name": "611", "dtype": "float32"}, {"name": "612", "dtype": "float32"}, {"name": "613", "dtype": "float32"}, {"name": "614", "dtype": "float32"}, {"name": "615", "dtype": "float32"}, {"name": "616", "dtype": "float32"}, {"name": "617", "dtype": "float32"}, {"name": "618", "dtype": "float32"}, {"name": "619", "dtype": "float32"}, {"name": "620", "dtype": "float32"}, {"name": "621", "dtype": "float32"}, {"name": "622", "dtype": "float32"}, {"name": "623", "dtype": "float32"}, {"name": "624", "dtype": "float32"}, {"name": "625", "dtype": "float32"}, {"name": "626", "dtype": "float32"}, {"name": "627", "dtype": "float32"}, {"name": "628", "dtype": "float32"}, {"name": "629", "dtype": "float32"}, {"name": "630", "dtype": "float32"}, {"name": "631", "dtype": "float32"}, {"name": "632", "dtype": "float32"}, {"name": "633", "dtype": "float32"}, {"name": "634", "dtype": "float32"}, {"name": "635", "dtype": "float32"}, {"name": "636", "dtype": "float32"}, {"name": "637", "dtype": "float32"}, {"name": "638", "dtype": "float32"}, {"name": "639", "dtype": "float32"}, {"name": "640", "dtype": "float32"}, {"name": "641", "dtype": "float32"}, {"name": "642", "dtype": "float32"}, {"name": "643", "dtype": "float32"}, {"name": "644", "dtype": "float32"}, {"name": "645", "dtype": "float32"}, {"name": "646", "dtype": "float32"}, {"name": "647", "dtype": "float32"}, {"name": "648", "dtype": "float32"}, {"name": "649", "dtype": "float32"}, {"name": "650", "dtype": "float32"}, {"name": "651", "dtype": "float32"}, {"name": "652", "dtype": "float32"}, {"name": "653", "dtype": "float32"}, {"name": "654", "dtype": "float32"}, {"name": "655", "dtype": "float32"}, {"name": "656", "dtype": "float32"}, {"name": "657", "dtype": "float32"}, {"name": "658", "dtype": "float32"}, {"name": "659", "dtype": "float32"}, {"name": "660", "dtype": "float32"}, {"name": "661", "dtype": "float32"}, {"name": "662", "dtype": "float32"}, {"name": "663", "dtype": "float32"}, {"name": "664", "dtype": "float32"}, {"name": "665", "dtype": "float32"}, {"name": "666", "dtype": "float32"}, {"name": "667", "dtype": "float32"}, {"name": "668", "dtype": "float32"}, {"name": "669", "dtype": "float32"}, {"name": 
"670", "dtype": "float32"}, {"name": "671", "dtype": "float32"}, {"name": "672", "dtype": "float32"}, {"name": "673", "dtype": "float32"}, {"name": "674", "dtype": "float32"}, {"name": "675", "dtype": "float32"}, {"name": "676", "dtype": "float32"}, {"name": "677", "dtype": "float32"}, {"name": "678", "dtype": "float32"}, {"name": "679", "dtype": "float32"}, {"name": "680", "dtype": "float32"}, {"name": "681", "dtype": "float32"}, {"name": "682", "dtype": "float32"}, {"name": "683", "dtype": "float32"}, {"name": "684", "dtype": "float32"}, {"name": "685", "dtype": "float32"}, {"name": "686", "dtype": "float32"}, {"name": "687", "dtype": "float32"}, {"name": "688", "dtype": "float32"}, {"name": "689", "dtype": "float32"}, {"name": "690", "dtype": "float32"}, {"name": "691", "dtype": "float32"}, {"name": "692", "dtype": "float32"}, {"name": "693", "dtype": "float32"}, {"name": "694", "dtype": "float32"}, {"name": "695", "dtype": "float32"}, {"name": "696", "dtype": "float32"}, {"name": "697", "dtype": "float32"}, {"name": "698", "dtype": "float32"}, {"name": "699", "dtype": "float32"}, {"name": "700", "dtype": "float32"}, {"name": "701", "dtype": "float32"}, {"name": "702", "dtype": "float32"}, {"name": "703", "dtype": "float32"}, {"name": "704", "dtype": "float32"}, {"name": "705", "dtype": "float32"}, {"name": "706", "dtype": "float32"}, {"name": "707", "dtype": "float32"}, {"name": "708", "dtype": "float32"}, {"name": "709", "dtype": "float32"}, {"name": "710", "dtype": "float32"}, {"name": "711", "dtype": "float32"}, {"name": "712", "dtype": "float32"}, {"name": "713", "dtype": "float32"}, {"name": "714", "dtype": "float32"}, {"name": "715", "dtype": "float32"}, {"name": "716", "dtype": "float32"}, {"name": "717", "dtype": "float32"}, {"name": "718", "dtype": "float32"}, {"name": "719", "dtype": "float32"}, {"name": "720", "dtype": "float32"}, {"name": "721", "dtype": "float32"}, {"name": "722", "dtype": "float32"}, {"name": "723", "dtype": "float32"}, {"name": "724", "dtype": "float32"}, {"name": "725", "dtype": "float32"}, {"name": "726", "dtype": "float32"}, {"name": "727", "dtype": "float32"}, {"name": "728", "dtype": "float32"}, {"name": "729", "dtype": "float32"}, {"name": "730", "dtype": "float32"}, {"name": "731", "dtype": "float32"}, {"name": "732", "dtype": "float32"}, {"name": "733", "dtype": "float32"}, {"name": "734", "dtype": "float32"}, {"name": "735", "dtype": "float32"}, {"name": "736", "dtype": "float32"}, {"name": "737", "dtype": "float32"}, {"name": "738", "dtype": "float32"}, {"name": "739", "dtype": "float32"}, {"name": "740", "dtype": "float32"}, {"name": "741", "dtype": "float32"}, {"name": "742", "dtype": "float32"}, {"name": "743", "dtype": "float32"}, {"name": "744", "dtype": "float32"}, {"name": "745", "dtype": "float32"}, {"name": "746", "dtype": "float32"}, {"name": "747", "dtype": "float32"}, {"name": "748", "dtype": "float32"}, {"name": "749", "dtype": "float32"}, {"name": "750", "dtype": "float32"}, {"name": "751", "dtype": "float32"}, {"name": "752", "dtype": "float32"}, {"name": "753", "dtype": "float32"}, {"name": "754", "dtype": "float32"}, {"name": "755", "dtype": "float32"}, {"name": "756", "dtype": "float32"}, {"name": "757", "dtype": "float32"}, {"name": "758", "dtype": "float32"}, {"name": "759", "dtype": "float32"}, {"name": "760", "dtype": "float32"}, {"name": "761", "dtype": "float32"}, {"name": "762", "dtype": "float32"}, {"name": "763", "dtype": "float32"}, {"name": "764", "dtype": "float32"}, {"name": "765", "dtype": "float32"}, {"name": 
"766", "dtype": "float32"}, {"name": "767", "dtype": "float32"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 115582709.0625, "num_examples": 37500}, {"name": "test", "num_bytes": 38527570.0, "num_examples": 12500}], "download_size": 211883038, "dataset_size": 154110279.0625}}
2023-08-23T05:03:08+00:00
[]
[]
TAGS #region-us
# Dataset Card for "BGL_BERT_Finetuned" More Information needed
[ "# Dataset Card for \"BGL_BERT_Finetuned\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"BGL_BERT_Finetuned\"\n\nMore Information needed" ]
[ 6, 20 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"BGL_BERT_Finetuned\"\n\nMore Information needed" ]
4c40de7b01913d772d7da92fb6948bf05422211d
# Dataset Card for "BGL_RoBERTa_Finetuned" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
EgilKarlsen/BGL_RoBERTa_Finetuned
[ "region:us" ]
2023-08-18T15:10:37+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "0", "dtype": "float32"}, {"name": "1", "dtype": "float32"}, {"name": "2", "dtype": "float32"}, {"name": "3", "dtype": "float32"}, {"name": "4", "dtype": "float32"}, {"name": "5", "dtype": "float32"}, {"name": "6", "dtype": "float32"}, {"name": "7", "dtype": "float32"}, {"name": "8", "dtype": "float32"}, {"name": "9", "dtype": "float32"}, {"name": "10", "dtype": "float32"}, {"name": "11", "dtype": "float32"}, {"name": "12", "dtype": "float32"}, {"name": "13", "dtype": "float32"}, {"name": "14", "dtype": "float32"}, {"name": "15", "dtype": "float32"}, {"name": "16", "dtype": "float32"}, {"name": "17", "dtype": "float32"}, {"name": "18", "dtype": "float32"}, {"name": "19", "dtype": "float32"}, {"name": "20", "dtype": "float32"}, {"name": "21", "dtype": "float32"}, {"name": "22", "dtype": "float32"}, {"name": "23", "dtype": "float32"}, {"name": "24", "dtype": "float32"}, {"name": "25", "dtype": "float32"}, {"name": "26", "dtype": "float32"}, {"name": "27", "dtype": "float32"}, {"name": "28", "dtype": "float32"}, {"name": "29", "dtype": "float32"}, {"name": "30", "dtype": "float32"}, {"name": "31", "dtype": "float32"}, {"name": "32", "dtype": "float32"}, {"name": "33", "dtype": "float32"}, {"name": "34", "dtype": "float32"}, {"name": "35", "dtype": "float32"}, {"name": "36", "dtype": "float32"}, {"name": "37", "dtype": "float32"}, {"name": "38", "dtype": "float32"}, {"name": "39", "dtype": "float32"}, {"name": "40", "dtype": "float32"}, {"name": "41", "dtype": "float32"}, {"name": "42", "dtype": "float32"}, {"name": "43", "dtype": "float32"}, {"name": "44", "dtype": "float32"}, {"name": "45", "dtype": "float32"}, {"name": "46", "dtype": "float32"}, {"name": "47", "dtype": "float32"}, {"name": "48", "dtype": "float32"}, {"name": "49", "dtype": "float32"}, {"name": "50", "dtype": "float32"}, {"name": "51", "dtype": "float32"}, {"name": "52", "dtype": "float32"}, {"name": "53", "dtype": "float32"}, {"name": "54", "dtype": "float32"}, {"name": "55", "dtype": "float32"}, {"name": "56", "dtype": "float32"}, {"name": "57", "dtype": "float32"}, {"name": "58", "dtype": "float32"}, {"name": "59", "dtype": "float32"}, {"name": "60", "dtype": "float32"}, {"name": "61", "dtype": "float32"}, {"name": "62", "dtype": "float32"}, {"name": "63", "dtype": "float32"}, {"name": "64", "dtype": "float32"}, {"name": "65", "dtype": "float32"}, {"name": "66", "dtype": "float32"}, {"name": "67", "dtype": "float32"}, {"name": "68", "dtype": "float32"}, {"name": "69", "dtype": "float32"}, {"name": "70", "dtype": "float32"}, {"name": "71", "dtype": "float32"}, {"name": "72", "dtype": "float32"}, {"name": "73", "dtype": "float32"}, {"name": "74", "dtype": "float32"}, {"name": "75", "dtype": "float32"}, {"name": "76", "dtype": "float32"}, {"name": "77", "dtype": "float32"}, {"name": "78", "dtype": "float32"}, {"name": "79", "dtype": "float32"}, {"name": "80", "dtype": "float32"}, {"name": "81", "dtype": "float32"}, {"name": "82", "dtype": "float32"}, {"name": "83", "dtype": "float32"}, {"name": "84", "dtype": "float32"}, {"name": "85", "dtype": "float32"}, {"name": "86", "dtype": "float32"}, {"name": "87", "dtype": "float32"}, {"name": "88", "dtype": "float32"}, {"name": "89", "dtype": "float32"}, {"name": "90", "dtype": "float32"}, {"name": "91", "dtype": "float32"}, {"name": "92", "dtype": "float32"}, {"name": "93", "dtype": "float32"}, 
{"name": "94", "dtype": "float32"}, {"name": "95", "dtype": "float32"}, {"name": "96", "dtype": "float32"}, {"name": "97", "dtype": "float32"}, {"name": "98", "dtype": "float32"}, {"name": "99", "dtype": "float32"}, {"name": "100", "dtype": "float32"}, {"name": "101", "dtype": "float32"}, {"name": "102", "dtype": "float32"}, {"name": "103", "dtype": "float32"}, {"name": "104", "dtype": "float32"}, {"name": "105", "dtype": "float32"}, {"name": "106", "dtype": "float32"}, {"name": "107", "dtype": "float32"}, {"name": "108", "dtype": "float32"}, {"name": "109", "dtype": "float32"}, {"name": "110", "dtype": "float32"}, {"name": "111", "dtype": "float32"}, {"name": "112", "dtype": "float32"}, {"name": "113", "dtype": "float32"}, {"name": "114", "dtype": "float32"}, {"name": "115", "dtype": "float32"}, {"name": "116", "dtype": "float32"}, {"name": "117", "dtype": "float32"}, {"name": "118", "dtype": "float32"}, {"name": "119", "dtype": "float32"}, {"name": "120", "dtype": "float32"}, {"name": "121", "dtype": "float32"}, {"name": "122", "dtype": "float32"}, {"name": "123", "dtype": "float32"}, {"name": "124", "dtype": "float32"}, {"name": "125", "dtype": "float32"}, {"name": "126", "dtype": "float32"}, {"name": "127", "dtype": "float32"}, {"name": "128", "dtype": "float32"}, {"name": "129", "dtype": "float32"}, {"name": "130", "dtype": "float32"}, {"name": "131", "dtype": "float32"}, {"name": "132", "dtype": "float32"}, {"name": "133", "dtype": "float32"}, {"name": "134", "dtype": "float32"}, {"name": "135", "dtype": "float32"}, {"name": "136", "dtype": "float32"}, {"name": "137", "dtype": "float32"}, {"name": "138", "dtype": "float32"}, {"name": "139", "dtype": "float32"}, {"name": "140", "dtype": "float32"}, {"name": "141", "dtype": "float32"}, {"name": "142", "dtype": "float32"}, {"name": "143", "dtype": "float32"}, {"name": "144", "dtype": "float32"}, {"name": "145", "dtype": "float32"}, {"name": "146", "dtype": "float32"}, {"name": "147", "dtype": "float32"}, {"name": "148", "dtype": "float32"}, {"name": "149", "dtype": "float32"}, {"name": "150", "dtype": "float32"}, {"name": "151", "dtype": "float32"}, {"name": "152", "dtype": "float32"}, {"name": "153", "dtype": "float32"}, {"name": "154", "dtype": "float32"}, {"name": "155", "dtype": "float32"}, {"name": "156", "dtype": "float32"}, {"name": "157", "dtype": "float32"}, {"name": "158", "dtype": "float32"}, {"name": "159", "dtype": "float32"}, {"name": "160", "dtype": "float32"}, {"name": "161", "dtype": "float32"}, {"name": "162", "dtype": "float32"}, {"name": "163", "dtype": "float32"}, {"name": "164", "dtype": "float32"}, {"name": "165", "dtype": "float32"}, {"name": "166", "dtype": "float32"}, {"name": "167", "dtype": "float32"}, {"name": "168", "dtype": "float32"}, {"name": "169", "dtype": "float32"}, {"name": "170", "dtype": "float32"}, {"name": "171", "dtype": "float32"}, {"name": "172", "dtype": "float32"}, {"name": "173", "dtype": "float32"}, {"name": "174", "dtype": "float32"}, {"name": "175", "dtype": "float32"}, {"name": "176", "dtype": "float32"}, {"name": "177", "dtype": "float32"}, {"name": "178", "dtype": "float32"}, {"name": "179", "dtype": "float32"}, {"name": "180", "dtype": "float32"}, {"name": "181", "dtype": "float32"}, {"name": "182", "dtype": "float32"}, {"name": "183", "dtype": "float32"}, {"name": "184", "dtype": "float32"}, {"name": "185", "dtype": "float32"}, {"name": "186", "dtype": "float32"}, {"name": "187", "dtype": "float32"}, {"name": "188", "dtype": "float32"}, {"name": "189", "dtype": "float32"}, {"name": 
"190", "dtype": "float32"}, {"name": "191", "dtype": "float32"}, {"name": "192", "dtype": "float32"}, {"name": "193", "dtype": "float32"}, {"name": "194", "dtype": "float32"}, {"name": "195", "dtype": "float32"}, {"name": "196", "dtype": "float32"}, {"name": "197", "dtype": "float32"}, {"name": "198", "dtype": "float32"}, {"name": "199", "dtype": "float32"}, {"name": "200", "dtype": "float32"}, {"name": "201", "dtype": "float32"}, {"name": "202", "dtype": "float32"}, {"name": "203", "dtype": "float32"}, {"name": "204", "dtype": "float32"}, {"name": "205", "dtype": "float32"}, {"name": "206", "dtype": "float32"}, {"name": "207", "dtype": "float32"}, {"name": "208", "dtype": "float32"}, {"name": "209", "dtype": "float32"}, {"name": "210", "dtype": "float32"}, {"name": "211", "dtype": "float32"}, {"name": "212", "dtype": "float32"}, {"name": "213", "dtype": "float32"}, {"name": "214", "dtype": "float32"}, {"name": "215", "dtype": "float32"}, {"name": "216", "dtype": "float32"}, {"name": "217", "dtype": "float32"}, {"name": "218", "dtype": "float32"}, {"name": "219", "dtype": "float32"}, {"name": "220", "dtype": "float32"}, {"name": "221", "dtype": "float32"}, {"name": "222", "dtype": "float32"}, {"name": "223", "dtype": "float32"}, {"name": "224", "dtype": "float32"}, {"name": "225", "dtype": "float32"}, {"name": "226", "dtype": "float32"}, {"name": "227", "dtype": "float32"}, {"name": "228", "dtype": "float32"}, {"name": "229", "dtype": "float32"}, {"name": "230", "dtype": "float32"}, {"name": "231", "dtype": "float32"}, {"name": "232", "dtype": "float32"}, {"name": "233", "dtype": "float32"}, {"name": "234", "dtype": "float32"}, {"name": "235", "dtype": "float32"}, {"name": "236", "dtype": "float32"}, {"name": "237", "dtype": "float32"}, {"name": "238", "dtype": "float32"}, {"name": "239", "dtype": "float32"}, {"name": "240", "dtype": "float32"}, {"name": "241", "dtype": "float32"}, {"name": "242", "dtype": "float32"}, {"name": "243", "dtype": "float32"}, {"name": "244", "dtype": "float32"}, {"name": "245", "dtype": "float32"}, {"name": "246", "dtype": "float32"}, {"name": "247", "dtype": "float32"}, {"name": "248", "dtype": "float32"}, {"name": "249", "dtype": "float32"}, {"name": "250", "dtype": "float32"}, {"name": "251", "dtype": "float32"}, {"name": "252", "dtype": "float32"}, {"name": "253", "dtype": "float32"}, {"name": "254", "dtype": "float32"}, {"name": "255", "dtype": "float32"}, {"name": "256", "dtype": "float32"}, {"name": "257", "dtype": "float32"}, {"name": "258", "dtype": "float32"}, {"name": "259", "dtype": "float32"}, {"name": "260", "dtype": "float32"}, {"name": "261", "dtype": "float32"}, {"name": "262", "dtype": "float32"}, {"name": "263", "dtype": "float32"}, {"name": "264", "dtype": "float32"}, {"name": "265", "dtype": "float32"}, {"name": "266", "dtype": "float32"}, {"name": "267", "dtype": "float32"}, {"name": "268", "dtype": "float32"}, {"name": "269", "dtype": "float32"}, {"name": "270", "dtype": "float32"}, {"name": "271", "dtype": "float32"}, {"name": "272", "dtype": "float32"}, {"name": "273", "dtype": "float32"}, {"name": "274", "dtype": "float32"}, {"name": "275", "dtype": "float32"}, {"name": "276", "dtype": "float32"}, {"name": "277", "dtype": "float32"}, {"name": "278", "dtype": "float32"}, {"name": "279", "dtype": "float32"}, {"name": "280", "dtype": "float32"}, {"name": "281", "dtype": "float32"}, {"name": "282", "dtype": "float32"}, {"name": "283", "dtype": "float32"}, {"name": "284", "dtype": "float32"}, {"name": "285", "dtype": "float32"}, {"name": 
"286", "dtype": "float32"}, {"name": "287", "dtype": "float32"}, {"name": "288", "dtype": "float32"}, {"name": "289", "dtype": "float32"}, {"name": "290", "dtype": "float32"}, {"name": "291", "dtype": "float32"}, {"name": "292", "dtype": "float32"}, {"name": "293", "dtype": "float32"}, {"name": "294", "dtype": "float32"}, {"name": "295", "dtype": "float32"}, {"name": "296", "dtype": "float32"}, {"name": "297", "dtype": "float32"}, {"name": "298", "dtype": "float32"}, {"name": "299", "dtype": "float32"}, {"name": "300", "dtype": "float32"}, {"name": "301", "dtype": "float32"}, {"name": "302", "dtype": "float32"}, {"name": "303", "dtype": "float32"}, {"name": "304", "dtype": "float32"}, {"name": "305", "dtype": "float32"}, {"name": "306", "dtype": "float32"}, {"name": "307", "dtype": "float32"}, {"name": "308", "dtype": "float32"}, {"name": "309", "dtype": "float32"}, {"name": "310", "dtype": "float32"}, {"name": "311", "dtype": "float32"}, {"name": "312", "dtype": "float32"}, {"name": "313", "dtype": "float32"}, {"name": "314", "dtype": "float32"}, {"name": "315", "dtype": "float32"}, {"name": "316", "dtype": "float32"}, {"name": "317", "dtype": "float32"}, {"name": "318", "dtype": "float32"}, {"name": "319", "dtype": "float32"}, {"name": "320", "dtype": "float32"}, {"name": "321", "dtype": "float32"}, {"name": "322", "dtype": "float32"}, {"name": "323", "dtype": "float32"}, {"name": "324", "dtype": "float32"}, {"name": "325", "dtype": "float32"}, {"name": "326", "dtype": "float32"}, {"name": "327", "dtype": "float32"}, {"name": "328", "dtype": "float32"}, {"name": "329", "dtype": "float32"}, {"name": "330", "dtype": "float32"}, {"name": "331", "dtype": "float32"}, {"name": "332", "dtype": "float32"}, {"name": "333", "dtype": "float32"}, {"name": "334", "dtype": "float32"}, {"name": "335", "dtype": "float32"}, {"name": "336", "dtype": "float32"}, {"name": "337", "dtype": "float32"}, {"name": "338", "dtype": "float32"}, {"name": "339", "dtype": "float32"}, {"name": "340", "dtype": "float32"}, {"name": "341", "dtype": "float32"}, {"name": "342", "dtype": "float32"}, {"name": "343", "dtype": "float32"}, {"name": "344", "dtype": "float32"}, {"name": "345", "dtype": "float32"}, {"name": "346", "dtype": "float32"}, {"name": "347", "dtype": "float32"}, {"name": "348", "dtype": "float32"}, {"name": "349", "dtype": "float32"}, {"name": "350", "dtype": "float32"}, {"name": "351", "dtype": "float32"}, {"name": "352", "dtype": "float32"}, {"name": "353", "dtype": "float32"}, {"name": "354", "dtype": "float32"}, {"name": "355", "dtype": "float32"}, {"name": "356", "dtype": "float32"}, {"name": "357", "dtype": "float32"}, {"name": "358", "dtype": "float32"}, {"name": "359", "dtype": "float32"}, {"name": "360", "dtype": "float32"}, {"name": "361", "dtype": "float32"}, {"name": "362", "dtype": "float32"}, {"name": "363", "dtype": "float32"}, {"name": "364", "dtype": "float32"}, {"name": "365", "dtype": "float32"}, {"name": "366", "dtype": "float32"}, {"name": "367", "dtype": "float32"}, {"name": "368", "dtype": "float32"}, {"name": "369", "dtype": "float32"}, {"name": "370", "dtype": "float32"}, {"name": "371", "dtype": "float32"}, {"name": "372", "dtype": "float32"}, {"name": "373", "dtype": "float32"}, {"name": "374", "dtype": "float32"}, {"name": "375", "dtype": "float32"}, {"name": "376", "dtype": "float32"}, {"name": "377", "dtype": "float32"}, {"name": "378", "dtype": "float32"}, {"name": "379", "dtype": "float32"}, {"name": "380", "dtype": "float32"}, {"name": "381", "dtype": "float32"}, {"name": 
"382", "dtype": "float32"}, {"name": "383", "dtype": "float32"}, {"name": "384", "dtype": "float32"}, {"name": "385", "dtype": "float32"}, {"name": "386", "dtype": "float32"}, {"name": "387", "dtype": "float32"}, {"name": "388", "dtype": "float32"}, {"name": "389", "dtype": "float32"}, {"name": "390", "dtype": "float32"}, {"name": "391", "dtype": "float32"}, {"name": "392", "dtype": "float32"}, {"name": "393", "dtype": "float32"}, {"name": "394", "dtype": "float32"}, {"name": "395", "dtype": "float32"}, {"name": "396", "dtype": "float32"}, {"name": "397", "dtype": "float32"}, {"name": "398", "dtype": "float32"}, {"name": "399", "dtype": "float32"}, {"name": "400", "dtype": "float32"}, {"name": "401", "dtype": "float32"}, {"name": "402", "dtype": "float32"}, {"name": "403", "dtype": "float32"}, {"name": "404", "dtype": "float32"}, {"name": "405", "dtype": "float32"}, {"name": "406", "dtype": "float32"}, {"name": "407", "dtype": "float32"}, {"name": "408", "dtype": "float32"}, {"name": "409", "dtype": "float32"}, {"name": "410", "dtype": "float32"}, {"name": "411", "dtype": "float32"}, {"name": "412", "dtype": "float32"}, {"name": "413", "dtype": "float32"}, {"name": "414", "dtype": "float32"}, {"name": "415", "dtype": "float32"}, {"name": "416", "dtype": "float32"}, {"name": "417", "dtype": "float32"}, {"name": "418", "dtype": "float32"}, {"name": "419", "dtype": "float32"}, {"name": "420", "dtype": "float32"}, {"name": "421", "dtype": "float32"}, {"name": "422", "dtype": "float32"}, {"name": "423", "dtype": "float32"}, {"name": "424", "dtype": "float32"}, {"name": "425", "dtype": "float32"}, {"name": "426", "dtype": "float32"}, {"name": "427", "dtype": "float32"}, {"name": "428", "dtype": "float32"}, {"name": "429", "dtype": "float32"}, {"name": "430", "dtype": "float32"}, {"name": "431", "dtype": "float32"}, {"name": "432", "dtype": "float32"}, {"name": "433", "dtype": "float32"}, {"name": "434", "dtype": "float32"}, {"name": "435", "dtype": "float32"}, {"name": "436", "dtype": "float32"}, {"name": "437", "dtype": "float32"}, {"name": "438", "dtype": "float32"}, {"name": "439", "dtype": "float32"}, {"name": "440", "dtype": "float32"}, {"name": "441", "dtype": "float32"}, {"name": "442", "dtype": "float32"}, {"name": "443", "dtype": "float32"}, {"name": "444", "dtype": "float32"}, {"name": "445", "dtype": "float32"}, {"name": "446", "dtype": "float32"}, {"name": "447", "dtype": "float32"}, {"name": "448", "dtype": "float32"}, {"name": "449", "dtype": "float32"}, {"name": "450", "dtype": "float32"}, {"name": "451", "dtype": "float32"}, {"name": "452", "dtype": "float32"}, {"name": "453", "dtype": "float32"}, {"name": "454", "dtype": "float32"}, {"name": "455", "dtype": "float32"}, {"name": "456", "dtype": "float32"}, {"name": "457", "dtype": "float32"}, {"name": "458", "dtype": "float32"}, {"name": "459", "dtype": "float32"}, {"name": "460", "dtype": "float32"}, {"name": "461", "dtype": "float32"}, {"name": "462", "dtype": "float32"}, {"name": "463", "dtype": "float32"}, {"name": "464", "dtype": "float32"}, {"name": "465", "dtype": "float32"}, {"name": "466", "dtype": "float32"}, {"name": "467", "dtype": "float32"}, {"name": "468", "dtype": "float32"}, {"name": "469", "dtype": "float32"}, {"name": "470", "dtype": "float32"}, {"name": "471", "dtype": "float32"}, {"name": "472", "dtype": "float32"}, {"name": "473", "dtype": "float32"}, {"name": "474", "dtype": "float32"}, {"name": "475", "dtype": "float32"}, {"name": "476", "dtype": "float32"}, {"name": "477", "dtype": "float32"}, {"name": 
"478", "dtype": "float32"}, {"name": "479", "dtype": "float32"}, {"name": "480", "dtype": "float32"}, {"name": "481", "dtype": "float32"}, {"name": "482", "dtype": "float32"}, {"name": "483", "dtype": "float32"}, {"name": "484", "dtype": "float32"}, {"name": "485", "dtype": "float32"}, {"name": "486", "dtype": "float32"}, {"name": "487", "dtype": "float32"}, {"name": "488", "dtype": "float32"}, {"name": "489", "dtype": "float32"}, {"name": "490", "dtype": "float32"}, {"name": "491", "dtype": "float32"}, {"name": "492", "dtype": "float32"}, {"name": "493", "dtype": "float32"}, {"name": "494", "dtype": "float32"}, {"name": "495", "dtype": "float32"}, {"name": "496", "dtype": "float32"}, {"name": "497", "dtype": "float32"}, {"name": "498", "dtype": "float32"}, {"name": "499", "dtype": "float32"}, {"name": "500", "dtype": "float32"}, {"name": "501", "dtype": "float32"}, {"name": "502", "dtype": "float32"}, {"name": "503", "dtype": "float32"}, {"name": "504", "dtype": "float32"}, {"name": "505", "dtype": "float32"}, {"name": "506", "dtype": "float32"}, {"name": "507", "dtype": "float32"}, {"name": "508", "dtype": "float32"}, {"name": "509", "dtype": "float32"}, {"name": "510", "dtype": "float32"}, {"name": "511", "dtype": "float32"}, {"name": "512", "dtype": "float32"}, {"name": "513", "dtype": "float32"}, {"name": "514", "dtype": "float32"}, {"name": "515", "dtype": "float32"}, {"name": "516", "dtype": "float32"}, {"name": "517", "dtype": "float32"}, {"name": "518", "dtype": "float32"}, {"name": "519", "dtype": "float32"}, {"name": "520", "dtype": "float32"}, {"name": "521", "dtype": "float32"}, {"name": "522", "dtype": "float32"}, {"name": "523", "dtype": "float32"}, {"name": "524", "dtype": "float32"}, {"name": "525", "dtype": "float32"}, {"name": "526", "dtype": "float32"}, {"name": "527", "dtype": "float32"}, {"name": "528", "dtype": "float32"}, {"name": "529", "dtype": "float32"}, {"name": "530", "dtype": "float32"}, {"name": "531", "dtype": "float32"}, {"name": "532", "dtype": "float32"}, {"name": "533", "dtype": "float32"}, {"name": "534", "dtype": "float32"}, {"name": "535", "dtype": "float32"}, {"name": "536", "dtype": "float32"}, {"name": "537", "dtype": "float32"}, {"name": "538", "dtype": "float32"}, {"name": "539", "dtype": "float32"}, {"name": "540", "dtype": "float32"}, {"name": "541", "dtype": "float32"}, {"name": "542", "dtype": "float32"}, {"name": "543", "dtype": "float32"}, {"name": "544", "dtype": "float32"}, {"name": "545", "dtype": "float32"}, {"name": "546", "dtype": "float32"}, {"name": "547", "dtype": "float32"}, {"name": "548", "dtype": "float32"}, {"name": "549", "dtype": "float32"}, {"name": "550", "dtype": "float32"}, {"name": "551", "dtype": "float32"}, {"name": "552", "dtype": "float32"}, {"name": "553", "dtype": "float32"}, {"name": "554", "dtype": "float32"}, {"name": "555", "dtype": "float32"}, {"name": "556", "dtype": "float32"}, {"name": "557", "dtype": "float32"}, {"name": "558", "dtype": "float32"}, {"name": "559", "dtype": "float32"}, {"name": "560", "dtype": "float32"}, {"name": "561", "dtype": "float32"}, {"name": "562", "dtype": "float32"}, {"name": "563", "dtype": "float32"}, {"name": "564", "dtype": "float32"}, {"name": "565", "dtype": "float32"}, {"name": "566", "dtype": "float32"}, {"name": "567", "dtype": "float32"}, {"name": "568", "dtype": "float32"}, {"name": "569", "dtype": "float32"}, {"name": "570", "dtype": "float32"}, {"name": "571", "dtype": "float32"}, {"name": "572", "dtype": "float32"}, {"name": "573", "dtype": "float32"}, {"name": 
"574", "dtype": "float32"}, {"name": "575", "dtype": "float32"}, {"name": "576", "dtype": "float32"}, {"name": "577", "dtype": "float32"}, {"name": "578", "dtype": "float32"}, {"name": "579", "dtype": "float32"}, {"name": "580", "dtype": "float32"}, {"name": "581", "dtype": "float32"}, {"name": "582", "dtype": "float32"}, {"name": "583", "dtype": "float32"}, {"name": "584", "dtype": "float32"}, {"name": "585", "dtype": "float32"}, {"name": "586", "dtype": "float32"}, {"name": "587", "dtype": "float32"}, {"name": "588", "dtype": "float32"}, {"name": "589", "dtype": "float32"}, {"name": "590", "dtype": "float32"}, {"name": "591", "dtype": "float32"}, {"name": "592", "dtype": "float32"}, {"name": "593", "dtype": "float32"}, {"name": "594", "dtype": "float32"}, {"name": "595", "dtype": "float32"}, {"name": "596", "dtype": "float32"}, {"name": "597", "dtype": "float32"}, {"name": "598", "dtype": "float32"}, {"name": "599", "dtype": "float32"}, {"name": "600", "dtype": "float32"}, {"name": "601", "dtype": "float32"}, {"name": "602", "dtype": "float32"}, {"name": "603", "dtype": "float32"}, {"name": "604", "dtype": "float32"}, {"name": "605", "dtype": "float32"}, {"name": "606", "dtype": "float32"}, {"name": "607", "dtype": "float32"}, {"name": "608", "dtype": "float32"}, {"name": "609", "dtype": "float32"}, {"name": "610", "dtype": "float32"}, {"name": "611", "dtype": "float32"}, {"name": "612", "dtype": "float32"}, {"name": "613", "dtype": "float32"}, {"name": "614", "dtype": "float32"}, {"name": "615", "dtype": "float32"}, {"name": "616", "dtype": "float32"}, {"name": "617", "dtype": "float32"}, {"name": "618", "dtype": "float32"}, {"name": "619", "dtype": "float32"}, {"name": "620", "dtype": "float32"}, {"name": "621", "dtype": "float32"}, {"name": "622", "dtype": "float32"}, {"name": "623", "dtype": "float32"}, {"name": "624", "dtype": "float32"}, {"name": "625", "dtype": "float32"}, {"name": "626", "dtype": "float32"}, {"name": "627", "dtype": "float32"}, {"name": "628", "dtype": "float32"}, {"name": "629", "dtype": "float32"}, {"name": "630", "dtype": "float32"}, {"name": "631", "dtype": "float32"}, {"name": "632", "dtype": "float32"}, {"name": "633", "dtype": "float32"}, {"name": "634", "dtype": "float32"}, {"name": "635", "dtype": "float32"}, {"name": "636", "dtype": "float32"}, {"name": "637", "dtype": "float32"}, {"name": "638", "dtype": "float32"}, {"name": "639", "dtype": "float32"}, {"name": "640", "dtype": "float32"}, {"name": "641", "dtype": "float32"}, {"name": "642", "dtype": "float32"}, {"name": "643", "dtype": "float32"}, {"name": "644", "dtype": "float32"}, {"name": "645", "dtype": "float32"}, {"name": "646", "dtype": "float32"}, {"name": "647", "dtype": "float32"}, {"name": "648", "dtype": "float32"}, {"name": "649", "dtype": "float32"}, {"name": "650", "dtype": "float32"}, {"name": "651", "dtype": "float32"}, {"name": "652", "dtype": "float32"}, {"name": "653", "dtype": "float32"}, {"name": "654", "dtype": "float32"}, {"name": "655", "dtype": "float32"}, {"name": "656", "dtype": "float32"}, {"name": "657", "dtype": "float32"}, {"name": "658", "dtype": "float32"}, {"name": "659", "dtype": "float32"}, {"name": "660", "dtype": "float32"}, {"name": "661", "dtype": "float32"}, {"name": "662", "dtype": "float32"}, {"name": "663", "dtype": "float32"}, {"name": "664", "dtype": "float32"}, {"name": "665", "dtype": "float32"}, {"name": "666", "dtype": "float32"}, {"name": "667", "dtype": "float32"}, {"name": "668", "dtype": "float32"}, {"name": "669", "dtype": "float32"}, {"name": 
"670", "dtype": "float32"}, {"name": "671", "dtype": "float32"}, {"name": "672", "dtype": "float32"}, {"name": "673", "dtype": "float32"}, {"name": "674", "dtype": "float32"}, {"name": "675", "dtype": "float32"}, {"name": "676", "dtype": "float32"}, {"name": "677", "dtype": "float32"}, {"name": "678", "dtype": "float32"}, {"name": "679", "dtype": "float32"}, {"name": "680", "dtype": "float32"}, {"name": "681", "dtype": "float32"}, {"name": "682", "dtype": "float32"}, {"name": "683", "dtype": "float32"}, {"name": "684", "dtype": "float32"}, {"name": "685", "dtype": "float32"}, {"name": "686", "dtype": "float32"}, {"name": "687", "dtype": "float32"}, {"name": "688", "dtype": "float32"}, {"name": "689", "dtype": "float32"}, {"name": "690", "dtype": "float32"}, {"name": "691", "dtype": "float32"}, {"name": "692", "dtype": "float32"}, {"name": "693", "dtype": "float32"}, {"name": "694", "dtype": "float32"}, {"name": "695", "dtype": "float32"}, {"name": "696", "dtype": "float32"}, {"name": "697", "dtype": "float32"}, {"name": "698", "dtype": "float32"}, {"name": "699", "dtype": "float32"}, {"name": "700", "dtype": "float32"}, {"name": "701", "dtype": "float32"}, {"name": "702", "dtype": "float32"}, {"name": "703", "dtype": "float32"}, {"name": "704", "dtype": "float32"}, {"name": "705", "dtype": "float32"}, {"name": "706", "dtype": "float32"}, {"name": "707", "dtype": "float32"}, {"name": "708", "dtype": "float32"}, {"name": "709", "dtype": "float32"}, {"name": "710", "dtype": "float32"}, {"name": "711", "dtype": "float32"}, {"name": "712", "dtype": "float32"}, {"name": "713", "dtype": "float32"}, {"name": "714", "dtype": "float32"}, {"name": "715", "dtype": "float32"}, {"name": "716", "dtype": "float32"}, {"name": "717", "dtype": "float32"}, {"name": "718", "dtype": "float32"}, {"name": "719", "dtype": "float32"}, {"name": "720", "dtype": "float32"}, {"name": "721", "dtype": "float32"}, {"name": "722", "dtype": "float32"}, {"name": "723", "dtype": "float32"}, {"name": "724", "dtype": "float32"}, {"name": "725", "dtype": "float32"}, {"name": "726", "dtype": "float32"}, {"name": "727", "dtype": "float32"}, {"name": "728", "dtype": "float32"}, {"name": "729", "dtype": "float32"}, {"name": "730", "dtype": "float32"}, {"name": "731", "dtype": "float32"}, {"name": "732", "dtype": "float32"}, {"name": "733", "dtype": "float32"}, {"name": "734", "dtype": "float32"}, {"name": "735", "dtype": "float32"}, {"name": "736", "dtype": "float32"}, {"name": "737", "dtype": "float32"}, {"name": "738", "dtype": "float32"}, {"name": "739", "dtype": "float32"}, {"name": "740", "dtype": "float32"}, {"name": "741", "dtype": "float32"}, {"name": "742", "dtype": "float32"}, {"name": "743", "dtype": "float32"}, {"name": "744", "dtype": "float32"}, {"name": "745", "dtype": "float32"}, {"name": "746", "dtype": "float32"}, {"name": "747", "dtype": "float32"}, {"name": "748", "dtype": "float32"}, {"name": "749", "dtype": "float32"}, {"name": "750", "dtype": "float32"}, {"name": "751", "dtype": "float32"}, {"name": "752", "dtype": "float32"}, {"name": "753", "dtype": "float32"}, {"name": "754", "dtype": "float32"}, {"name": "755", "dtype": "float32"}, {"name": "756", "dtype": "float32"}, {"name": "757", "dtype": "float32"}, {"name": "758", "dtype": "float32"}, {"name": "759", "dtype": "float32"}, {"name": "760", "dtype": "float32"}, {"name": "761", "dtype": "float32"}, {"name": "762", "dtype": "float32"}, {"name": "763", "dtype": "float32"}, {"name": "764", "dtype": "float32"}, {"name": "765", "dtype": "float32"}, {"name": 
"766", "dtype": "float32"}, {"name": "767", "dtype": "float32"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 115582709.0625, "num_examples": 37500}, {"name": "test", "num_bytes": 38527570.0, "num_examples": 12500}], "download_size": 211881880, "dataset_size": 154110279.0625}}
2023-08-23T05:10:35+00:00
[]
[]
TAGS #region-us
# Dataset Card for "BGL_RoBERTa_Finetuned" More Information needed
[ "# Dataset Card for \"BGL_RoBERTa_Finetuned\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"BGL_RoBERTa_Finetuned\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"BGL_RoBERTa_Finetuned\"\n\nMore Information needed" ]
a9bb6e54efb98d9864fbc818003262e5e77be3ee
# Dataset Card for "dermatology_anomaly_detection" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
KhalfounMehdi/dermatology_anomaly_detection
[ "region:us" ]
2023-08-18T15:11:58+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 51523748.0, "num_examples": 656}], "download_size": 51529683, "dataset_size": 51523748.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}, {"config_name": "KhalfounMehdi--dermatology_anomaly_detection", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-18T15:36:55+00:00
[]
[]
TAGS #region-us
# Dataset Card for "dermatology_anomaly_detection" More Information needed
[ "# Dataset Card for \"dermatology_anomaly_detection\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"dermatology_anomaly_detection\"\n\nMore Information needed" ]
[ 6, 20 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"dermatology_anomaly_detection\"\n\nMore Information needed" ]
a64cb39f6d30d0bc5becb0727fb20f969581f1f2
# Dataset Card for "BGL_DistilRoBERTa_Finetuned" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
EgilKarlsen/BGL_DistilRoBERTa_Finetuned
[ "region:us" ]
2023-08-18T15:17:30+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "0", "dtype": "float32"}, {"name": "1", "dtype": "float32"}, {"name": "2", "dtype": "float32"}, {"name": "3", "dtype": "float32"}, {"name": "4", "dtype": "float32"}, {"name": "5", "dtype": "float32"}, {"name": "6", "dtype": "float32"}, {"name": "7", "dtype": "float32"}, {"name": "8", "dtype": "float32"}, {"name": "9", "dtype": "float32"}, {"name": "10", "dtype": "float32"}, {"name": "11", "dtype": "float32"}, {"name": "12", "dtype": "float32"}, {"name": "13", "dtype": "float32"}, {"name": "14", "dtype": "float32"}, {"name": "15", "dtype": "float32"}, {"name": "16", "dtype": "float32"}, {"name": "17", "dtype": "float32"}, {"name": "18", "dtype": "float32"}, {"name": "19", "dtype": "float32"}, {"name": "20", "dtype": "float32"}, {"name": "21", "dtype": "float32"}, {"name": "22", "dtype": "float32"}, {"name": "23", "dtype": "float32"}, {"name": "24", "dtype": "float32"}, {"name": "25", "dtype": "float32"}, {"name": "26", "dtype": "float32"}, {"name": "27", "dtype": "float32"}, {"name": "28", "dtype": "float32"}, {"name": "29", "dtype": "float32"}, {"name": "30", "dtype": "float32"}, {"name": "31", "dtype": "float32"}, {"name": "32", "dtype": "float32"}, {"name": "33", "dtype": "float32"}, {"name": "34", "dtype": "float32"}, {"name": "35", "dtype": "float32"}, {"name": "36", "dtype": "float32"}, {"name": "37", "dtype": "float32"}, {"name": "38", "dtype": "float32"}, {"name": "39", "dtype": "float32"}, {"name": "40", "dtype": "float32"}, {"name": "41", "dtype": "float32"}, {"name": "42", "dtype": "float32"}, {"name": "43", "dtype": "float32"}, {"name": "44", "dtype": "float32"}, {"name": "45", "dtype": "float32"}, {"name": "46", "dtype": "float32"}, {"name": "47", "dtype": "float32"}, {"name": "48", "dtype": "float32"}, {"name": "49", "dtype": "float32"}, {"name": "50", "dtype": "float32"}, {"name": "51", "dtype": "float32"}, {"name": "52", "dtype": "float32"}, {"name": "53", "dtype": "float32"}, {"name": "54", "dtype": "float32"}, {"name": "55", "dtype": "float32"}, {"name": "56", "dtype": "float32"}, {"name": "57", "dtype": "float32"}, {"name": "58", "dtype": "float32"}, {"name": "59", "dtype": "float32"}, {"name": "60", "dtype": "float32"}, {"name": "61", "dtype": "float32"}, {"name": "62", "dtype": "float32"}, {"name": "63", "dtype": "float32"}, {"name": "64", "dtype": "float32"}, {"name": "65", "dtype": "float32"}, {"name": "66", "dtype": "float32"}, {"name": "67", "dtype": "float32"}, {"name": "68", "dtype": "float32"}, {"name": "69", "dtype": "float32"}, {"name": "70", "dtype": "float32"}, {"name": "71", "dtype": "float32"}, {"name": "72", "dtype": "float32"}, {"name": "73", "dtype": "float32"}, {"name": "74", "dtype": "float32"}, {"name": "75", "dtype": "float32"}, {"name": "76", "dtype": "float32"}, {"name": "77", "dtype": "float32"}, {"name": "78", "dtype": "float32"}, {"name": "79", "dtype": "float32"}, {"name": "80", "dtype": "float32"}, {"name": "81", "dtype": "float32"}, {"name": "82", "dtype": "float32"}, {"name": "83", "dtype": "float32"}, {"name": "84", "dtype": "float32"}, {"name": "85", "dtype": "float32"}, {"name": "86", "dtype": "float32"}, {"name": "87", "dtype": "float32"}, {"name": "88", "dtype": "float32"}, {"name": "89", "dtype": "float32"}, {"name": "90", "dtype": "float32"}, {"name": "91", "dtype": "float32"}, {"name": "92", "dtype": "float32"}, {"name": "93", "dtype": "float32"}, 
{"name": "94", "dtype": "float32"}, {"name": "95", "dtype": "float32"}, {"name": "96", "dtype": "float32"}, {"name": "97", "dtype": "float32"}, {"name": "98", "dtype": "float32"}, {"name": "99", "dtype": "float32"}, {"name": "100", "dtype": "float32"}, {"name": "101", "dtype": "float32"}, {"name": "102", "dtype": "float32"}, {"name": "103", "dtype": "float32"}, {"name": "104", "dtype": "float32"}, {"name": "105", "dtype": "float32"}, {"name": "106", "dtype": "float32"}, {"name": "107", "dtype": "float32"}, {"name": "108", "dtype": "float32"}, {"name": "109", "dtype": "float32"}, {"name": "110", "dtype": "float32"}, {"name": "111", "dtype": "float32"}, {"name": "112", "dtype": "float32"}, {"name": "113", "dtype": "float32"}, {"name": "114", "dtype": "float32"}, {"name": "115", "dtype": "float32"}, {"name": "116", "dtype": "float32"}, {"name": "117", "dtype": "float32"}, {"name": "118", "dtype": "float32"}, {"name": "119", "dtype": "float32"}, {"name": "120", "dtype": "float32"}, {"name": "121", "dtype": "float32"}, {"name": "122", "dtype": "float32"}, {"name": "123", "dtype": "float32"}, {"name": "124", "dtype": "float32"}, {"name": "125", "dtype": "float32"}, {"name": "126", "dtype": "float32"}, {"name": "127", "dtype": "float32"}, {"name": "128", "dtype": "float32"}, {"name": "129", "dtype": "float32"}, {"name": "130", "dtype": "float32"}, {"name": "131", "dtype": "float32"}, {"name": "132", "dtype": "float32"}, {"name": "133", "dtype": "float32"}, {"name": "134", "dtype": "float32"}, {"name": "135", "dtype": "float32"}, {"name": "136", "dtype": "float32"}, {"name": "137", "dtype": "float32"}, {"name": "138", "dtype": "float32"}, {"name": "139", "dtype": "float32"}, {"name": "140", "dtype": "float32"}, {"name": "141", "dtype": "float32"}, {"name": "142", "dtype": "float32"}, {"name": "143", "dtype": "float32"}, {"name": "144", "dtype": "float32"}, {"name": "145", "dtype": "float32"}, {"name": "146", "dtype": "float32"}, {"name": "147", "dtype": "float32"}, {"name": "148", "dtype": "float32"}, {"name": "149", "dtype": "float32"}, {"name": "150", "dtype": "float32"}, {"name": "151", "dtype": "float32"}, {"name": "152", "dtype": "float32"}, {"name": "153", "dtype": "float32"}, {"name": "154", "dtype": "float32"}, {"name": "155", "dtype": "float32"}, {"name": "156", "dtype": "float32"}, {"name": "157", "dtype": "float32"}, {"name": "158", "dtype": "float32"}, {"name": "159", "dtype": "float32"}, {"name": "160", "dtype": "float32"}, {"name": "161", "dtype": "float32"}, {"name": "162", "dtype": "float32"}, {"name": "163", "dtype": "float32"}, {"name": "164", "dtype": "float32"}, {"name": "165", "dtype": "float32"}, {"name": "166", "dtype": "float32"}, {"name": "167", "dtype": "float32"}, {"name": "168", "dtype": "float32"}, {"name": "169", "dtype": "float32"}, {"name": "170", "dtype": "float32"}, {"name": "171", "dtype": "float32"}, {"name": "172", "dtype": "float32"}, {"name": "173", "dtype": "float32"}, {"name": "174", "dtype": "float32"}, {"name": "175", "dtype": "float32"}, {"name": "176", "dtype": "float32"}, {"name": "177", "dtype": "float32"}, {"name": "178", "dtype": "float32"}, {"name": "179", "dtype": "float32"}, {"name": "180", "dtype": "float32"}, {"name": "181", "dtype": "float32"}, {"name": "182", "dtype": "float32"}, {"name": "183", "dtype": "float32"}, {"name": "184", "dtype": "float32"}, {"name": "185", "dtype": "float32"}, {"name": "186", "dtype": "float32"}, {"name": "187", "dtype": "float32"}, {"name": "188", "dtype": "float32"}, {"name": "189", "dtype": "float32"}, {"name": 
"190", "dtype": "float32"}, {"name": "191", "dtype": "float32"}, {"name": "192", "dtype": "float32"}, {"name": "193", "dtype": "float32"}, {"name": "194", "dtype": "float32"}, {"name": "195", "dtype": "float32"}, {"name": "196", "dtype": "float32"}, {"name": "197", "dtype": "float32"}, {"name": "198", "dtype": "float32"}, {"name": "199", "dtype": "float32"}, {"name": "200", "dtype": "float32"}, {"name": "201", "dtype": "float32"}, {"name": "202", "dtype": "float32"}, {"name": "203", "dtype": "float32"}, {"name": "204", "dtype": "float32"}, {"name": "205", "dtype": "float32"}, {"name": "206", "dtype": "float32"}, {"name": "207", "dtype": "float32"}, {"name": "208", "dtype": "float32"}, {"name": "209", "dtype": "float32"}, {"name": "210", "dtype": "float32"}, {"name": "211", "dtype": "float32"}, {"name": "212", "dtype": "float32"}, {"name": "213", "dtype": "float32"}, {"name": "214", "dtype": "float32"}, {"name": "215", "dtype": "float32"}, {"name": "216", "dtype": "float32"}, {"name": "217", "dtype": "float32"}, {"name": "218", "dtype": "float32"}, {"name": "219", "dtype": "float32"}, {"name": "220", "dtype": "float32"}, {"name": "221", "dtype": "float32"}, {"name": "222", "dtype": "float32"}, {"name": "223", "dtype": "float32"}, {"name": "224", "dtype": "float32"}, {"name": "225", "dtype": "float32"}, {"name": "226", "dtype": "float32"}, {"name": "227", "dtype": "float32"}, {"name": "228", "dtype": "float32"}, {"name": "229", "dtype": "float32"}, {"name": "230", "dtype": "float32"}, {"name": "231", "dtype": "float32"}, {"name": "232", "dtype": "float32"}, {"name": "233", "dtype": "float32"}, {"name": "234", "dtype": "float32"}, {"name": "235", "dtype": "float32"}, {"name": "236", "dtype": "float32"}, {"name": "237", "dtype": "float32"}, {"name": "238", "dtype": "float32"}, {"name": "239", "dtype": "float32"}, {"name": "240", "dtype": "float32"}, {"name": "241", "dtype": "float32"}, {"name": "242", "dtype": "float32"}, {"name": "243", "dtype": "float32"}, {"name": "244", "dtype": "float32"}, {"name": "245", "dtype": "float32"}, {"name": "246", "dtype": "float32"}, {"name": "247", "dtype": "float32"}, {"name": "248", "dtype": "float32"}, {"name": "249", "dtype": "float32"}, {"name": "250", "dtype": "float32"}, {"name": "251", "dtype": "float32"}, {"name": "252", "dtype": "float32"}, {"name": "253", "dtype": "float32"}, {"name": "254", "dtype": "float32"}, {"name": "255", "dtype": "float32"}, {"name": "256", "dtype": "float32"}, {"name": "257", "dtype": "float32"}, {"name": "258", "dtype": "float32"}, {"name": "259", "dtype": "float32"}, {"name": "260", "dtype": "float32"}, {"name": "261", "dtype": "float32"}, {"name": "262", "dtype": "float32"}, {"name": "263", "dtype": "float32"}, {"name": "264", "dtype": "float32"}, {"name": "265", "dtype": "float32"}, {"name": "266", "dtype": "float32"}, {"name": "267", "dtype": "float32"}, {"name": "268", "dtype": "float32"}, {"name": "269", "dtype": "float32"}, {"name": "270", "dtype": "float32"}, {"name": "271", "dtype": "float32"}, {"name": "272", "dtype": "float32"}, {"name": "273", "dtype": "float32"}, {"name": "274", "dtype": "float32"}, {"name": "275", "dtype": "float32"}, {"name": "276", "dtype": "float32"}, {"name": "277", "dtype": "float32"}, {"name": "278", "dtype": "float32"}, {"name": "279", "dtype": "float32"}, {"name": "280", "dtype": "float32"}, {"name": "281", "dtype": "float32"}, {"name": "282", "dtype": "float32"}, {"name": "283", "dtype": "float32"}, {"name": "284", "dtype": "float32"}, {"name": "285", "dtype": "float32"}, {"name": 
"286", "dtype": "float32"}, {"name": "287", "dtype": "float32"}, {"name": "288", "dtype": "float32"}, {"name": "289", "dtype": "float32"}, {"name": "290", "dtype": "float32"}, {"name": "291", "dtype": "float32"}, {"name": "292", "dtype": "float32"}, {"name": "293", "dtype": "float32"}, {"name": "294", "dtype": "float32"}, {"name": "295", "dtype": "float32"}, {"name": "296", "dtype": "float32"}, {"name": "297", "dtype": "float32"}, {"name": "298", "dtype": "float32"}, {"name": "299", "dtype": "float32"}, {"name": "300", "dtype": "float32"}, {"name": "301", "dtype": "float32"}, {"name": "302", "dtype": "float32"}, {"name": "303", "dtype": "float32"}, {"name": "304", "dtype": "float32"}, {"name": "305", "dtype": "float32"}, {"name": "306", "dtype": "float32"}, {"name": "307", "dtype": "float32"}, {"name": "308", "dtype": "float32"}, {"name": "309", "dtype": "float32"}, {"name": "310", "dtype": "float32"}, {"name": "311", "dtype": "float32"}, {"name": "312", "dtype": "float32"}, {"name": "313", "dtype": "float32"}, {"name": "314", "dtype": "float32"}, {"name": "315", "dtype": "float32"}, {"name": "316", "dtype": "float32"}, {"name": "317", "dtype": "float32"}, {"name": "318", "dtype": "float32"}, {"name": "319", "dtype": "float32"}, {"name": "320", "dtype": "float32"}, {"name": "321", "dtype": "float32"}, {"name": "322", "dtype": "float32"}, {"name": "323", "dtype": "float32"}, {"name": "324", "dtype": "float32"}, {"name": "325", "dtype": "float32"}, {"name": "326", "dtype": "float32"}, {"name": "327", "dtype": "float32"}, {"name": "328", "dtype": "float32"}, {"name": "329", "dtype": "float32"}, {"name": "330", "dtype": "float32"}, {"name": "331", "dtype": "float32"}, {"name": "332", "dtype": "float32"}, {"name": "333", "dtype": "float32"}, {"name": "334", "dtype": "float32"}, {"name": "335", "dtype": "float32"}, {"name": "336", "dtype": "float32"}, {"name": "337", "dtype": "float32"}, {"name": "338", "dtype": "float32"}, {"name": "339", "dtype": "float32"}, {"name": "340", "dtype": "float32"}, {"name": "341", "dtype": "float32"}, {"name": "342", "dtype": "float32"}, {"name": "343", "dtype": "float32"}, {"name": "344", "dtype": "float32"}, {"name": "345", "dtype": "float32"}, {"name": "346", "dtype": "float32"}, {"name": "347", "dtype": "float32"}, {"name": "348", "dtype": "float32"}, {"name": "349", "dtype": "float32"}, {"name": "350", "dtype": "float32"}, {"name": "351", "dtype": "float32"}, {"name": "352", "dtype": "float32"}, {"name": "353", "dtype": "float32"}, {"name": "354", "dtype": "float32"}, {"name": "355", "dtype": "float32"}, {"name": "356", "dtype": "float32"}, {"name": "357", "dtype": "float32"}, {"name": "358", "dtype": "float32"}, {"name": "359", "dtype": "float32"}, {"name": "360", "dtype": "float32"}, {"name": "361", "dtype": "float32"}, {"name": "362", "dtype": "float32"}, {"name": "363", "dtype": "float32"}, {"name": "364", "dtype": "float32"}, {"name": "365", "dtype": "float32"}, {"name": "366", "dtype": "float32"}, {"name": "367", "dtype": "float32"}, {"name": "368", "dtype": "float32"}, {"name": "369", "dtype": "float32"}, {"name": "370", "dtype": "float32"}, {"name": "371", "dtype": "float32"}, {"name": "372", "dtype": "float32"}, {"name": "373", "dtype": "float32"}, {"name": "374", "dtype": "float32"}, {"name": "375", "dtype": "float32"}, {"name": "376", "dtype": "float32"}, {"name": "377", "dtype": "float32"}, {"name": "378", "dtype": "float32"}, {"name": "379", "dtype": "float32"}, {"name": "380", "dtype": "float32"}, {"name": "381", "dtype": "float32"}, {"name": 
"382", "dtype": "float32"}, {"name": "383", "dtype": "float32"}, {"name": "384", "dtype": "float32"}, {"name": "385", "dtype": "float32"}, {"name": "386", "dtype": "float32"}, {"name": "387", "dtype": "float32"}, {"name": "388", "dtype": "float32"}, {"name": "389", "dtype": "float32"}, {"name": "390", "dtype": "float32"}, {"name": "391", "dtype": "float32"}, {"name": "392", "dtype": "float32"}, {"name": "393", "dtype": "float32"}, {"name": "394", "dtype": "float32"}, {"name": "395", "dtype": "float32"}, {"name": "396", "dtype": "float32"}, {"name": "397", "dtype": "float32"}, {"name": "398", "dtype": "float32"}, {"name": "399", "dtype": "float32"}, {"name": "400", "dtype": "float32"}, {"name": "401", "dtype": "float32"}, {"name": "402", "dtype": "float32"}, {"name": "403", "dtype": "float32"}, {"name": "404", "dtype": "float32"}, {"name": "405", "dtype": "float32"}, {"name": "406", "dtype": "float32"}, {"name": "407", "dtype": "float32"}, {"name": "408", "dtype": "float32"}, {"name": "409", "dtype": "float32"}, {"name": "410", "dtype": "float32"}, {"name": "411", "dtype": "float32"}, {"name": "412", "dtype": "float32"}, {"name": "413", "dtype": "float32"}, {"name": "414", "dtype": "float32"}, {"name": "415", "dtype": "float32"}, {"name": "416", "dtype": "float32"}, {"name": "417", "dtype": "float32"}, {"name": "418", "dtype": "float32"}, {"name": "419", "dtype": "float32"}, {"name": "420", "dtype": "float32"}, {"name": "421", "dtype": "float32"}, {"name": "422", "dtype": "float32"}, {"name": "423", "dtype": "float32"}, {"name": "424", "dtype": "float32"}, {"name": "425", "dtype": "float32"}, {"name": "426", "dtype": "float32"}, {"name": "427", "dtype": "float32"}, {"name": "428", "dtype": "float32"}, {"name": "429", "dtype": "float32"}, {"name": "430", "dtype": "float32"}, {"name": "431", "dtype": "float32"}, {"name": "432", "dtype": "float32"}, {"name": "433", "dtype": "float32"}, {"name": "434", "dtype": "float32"}, {"name": "435", "dtype": "float32"}, {"name": "436", "dtype": "float32"}, {"name": "437", "dtype": "float32"}, {"name": "438", "dtype": "float32"}, {"name": "439", "dtype": "float32"}, {"name": "440", "dtype": "float32"}, {"name": "441", "dtype": "float32"}, {"name": "442", "dtype": "float32"}, {"name": "443", "dtype": "float32"}, {"name": "444", "dtype": "float32"}, {"name": "445", "dtype": "float32"}, {"name": "446", "dtype": "float32"}, {"name": "447", "dtype": "float32"}, {"name": "448", "dtype": "float32"}, {"name": "449", "dtype": "float32"}, {"name": "450", "dtype": "float32"}, {"name": "451", "dtype": "float32"}, {"name": "452", "dtype": "float32"}, {"name": "453", "dtype": "float32"}, {"name": "454", "dtype": "float32"}, {"name": "455", "dtype": "float32"}, {"name": "456", "dtype": "float32"}, {"name": "457", "dtype": "float32"}, {"name": "458", "dtype": "float32"}, {"name": "459", "dtype": "float32"}, {"name": "460", "dtype": "float32"}, {"name": "461", "dtype": "float32"}, {"name": "462", "dtype": "float32"}, {"name": "463", "dtype": "float32"}, {"name": "464", "dtype": "float32"}, {"name": "465", "dtype": "float32"}, {"name": "466", "dtype": "float32"}, {"name": "467", "dtype": "float32"}, {"name": "468", "dtype": "float32"}, {"name": "469", "dtype": "float32"}, {"name": "470", "dtype": "float32"}, {"name": "471", "dtype": "float32"}, {"name": "472", "dtype": "float32"}, {"name": "473", "dtype": "float32"}, {"name": "474", "dtype": "float32"}, {"name": "475", "dtype": "float32"}, {"name": "476", "dtype": "float32"}, {"name": "477", "dtype": "float32"}, {"name": 
"478", "dtype": "float32"}, {"name": "479", "dtype": "float32"}, {"name": "480", "dtype": "float32"}, {"name": "481", "dtype": "float32"}, {"name": "482", "dtype": "float32"}, {"name": "483", "dtype": "float32"}, {"name": "484", "dtype": "float32"}, {"name": "485", "dtype": "float32"}, {"name": "486", "dtype": "float32"}, {"name": "487", "dtype": "float32"}, {"name": "488", "dtype": "float32"}, {"name": "489", "dtype": "float32"}, {"name": "490", "dtype": "float32"}, {"name": "491", "dtype": "float32"}, {"name": "492", "dtype": "float32"}, {"name": "493", "dtype": "float32"}, {"name": "494", "dtype": "float32"}, {"name": "495", "dtype": "float32"}, {"name": "496", "dtype": "float32"}, {"name": "497", "dtype": "float32"}, {"name": "498", "dtype": "float32"}, {"name": "499", "dtype": "float32"}, {"name": "500", "dtype": "float32"}, {"name": "501", "dtype": "float32"}, {"name": "502", "dtype": "float32"}, {"name": "503", "dtype": "float32"}, {"name": "504", "dtype": "float32"}, {"name": "505", "dtype": "float32"}, {"name": "506", "dtype": "float32"}, {"name": "507", "dtype": "float32"}, {"name": "508", "dtype": "float32"}, {"name": "509", "dtype": "float32"}, {"name": "510", "dtype": "float32"}, {"name": "511", "dtype": "float32"}, {"name": "512", "dtype": "float32"}, {"name": "513", "dtype": "float32"}, {"name": "514", "dtype": "float32"}, {"name": "515", "dtype": "float32"}, {"name": "516", "dtype": "float32"}, {"name": "517", "dtype": "float32"}, {"name": "518", "dtype": "float32"}, {"name": "519", "dtype": "float32"}, {"name": "520", "dtype": "float32"}, {"name": "521", "dtype": "float32"}, {"name": "522", "dtype": "float32"}, {"name": "523", "dtype": "float32"}, {"name": "524", "dtype": "float32"}, {"name": "525", "dtype": "float32"}, {"name": "526", "dtype": "float32"}, {"name": "527", "dtype": "float32"}, {"name": "528", "dtype": "float32"}, {"name": "529", "dtype": "float32"}, {"name": "530", "dtype": "float32"}, {"name": "531", "dtype": "float32"}, {"name": "532", "dtype": "float32"}, {"name": "533", "dtype": "float32"}, {"name": "534", "dtype": "float32"}, {"name": "535", "dtype": "float32"}, {"name": "536", "dtype": "float32"}, {"name": "537", "dtype": "float32"}, {"name": "538", "dtype": "float32"}, {"name": "539", "dtype": "float32"}, {"name": "540", "dtype": "float32"}, {"name": "541", "dtype": "float32"}, {"name": "542", "dtype": "float32"}, {"name": "543", "dtype": "float32"}, {"name": "544", "dtype": "float32"}, {"name": "545", "dtype": "float32"}, {"name": "546", "dtype": "float32"}, {"name": "547", "dtype": "float32"}, {"name": "548", "dtype": "float32"}, {"name": "549", "dtype": "float32"}, {"name": "550", "dtype": "float32"}, {"name": "551", "dtype": "float32"}, {"name": "552", "dtype": "float32"}, {"name": "553", "dtype": "float32"}, {"name": "554", "dtype": "float32"}, {"name": "555", "dtype": "float32"}, {"name": "556", "dtype": "float32"}, {"name": "557", "dtype": "float32"}, {"name": "558", "dtype": "float32"}, {"name": "559", "dtype": "float32"}, {"name": "560", "dtype": "float32"}, {"name": "561", "dtype": "float32"}, {"name": "562", "dtype": "float32"}, {"name": "563", "dtype": "float32"}, {"name": "564", "dtype": "float32"}, {"name": "565", "dtype": "float32"}, {"name": "566", "dtype": "float32"}, {"name": "567", "dtype": "float32"}, {"name": "568", "dtype": "float32"}, {"name": "569", "dtype": "float32"}, {"name": "570", "dtype": "float32"}, {"name": "571", "dtype": "float32"}, {"name": "572", "dtype": "float32"}, {"name": "573", "dtype": "float32"}, {"name": 
"574", "dtype": "float32"}, {"name": "575", "dtype": "float32"}, {"name": "576", "dtype": "float32"}, {"name": "577", "dtype": "float32"}, {"name": "578", "dtype": "float32"}, {"name": "579", "dtype": "float32"}, {"name": "580", "dtype": "float32"}, {"name": "581", "dtype": "float32"}, {"name": "582", "dtype": "float32"}, {"name": "583", "dtype": "float32"}, {"name": "584", "dtype": "float32"}, {"name": "585", "dtype": "float32"}, {"name": "586", "dtype": "float32"}, {"name": "587", "dtype": "float32"}, {"name": "588", "dtype": "float32"}, {"name": "589", "dtype": "float32"}, {"name": "590", "dtype": "float32"}, {"name": "591", "dtype": "float32"}, {"name": "592", "dtype": "float32"}, {"name": "593", "dtype": "float32"}, {"name": "594", "dtype": "float32"}, {"name": "595", "dtype": "float32"}, {"name": "596", "dtype": "float32"}, {"name": "597", "dtype": "float32"}, {"name": "598", "dtype": "float32"}, {"name": "599", "dtype": "float32"}, {"name": "600", "dtype": "float32"}, {"name": "601", "dtype": "float32"}, {"name": "602", "dtype": "float32"}, {"name": "603", "dtype": "float32"}, {"name": "604", "dtype": "float32"}, {"name": "605", "dtype": "float32"}, {"name": "606", "dtype": "float32"}, {"name": "607", "dtype": "float32"}, {"name": "608", "dtype": "float32"}, {"name": "609", "dtype": "float32"}, {"name": "610", "dtype": "float32"}, {"name": "611", "dtype": "float32"}, {"name": "612", "dtype": "float32"}, {"name": "613", "dtype": "float32"}, {"name": "614", "dtype": "float32"}, {"name": "615", "dtype": "float32"}, {"name": "616", "dtype": "float32"}, {"name": "617", "dtype": "float32"}, {"name": "618", "dtype": "float32"}, {"name": "619", "dtype": "float32"}, {"name": "620", "dtype": "float32"}, {"name": "621", "dtype": "float32"}, {"name": "622", "dtype": "float32"}, {"name": "623", "dtype": "float32"}, {"name": "624", "dtype": "float32"}, {"name": "625", "dtype": "float32"}, {"name": "626", "dtype": "float32"}, {"name": "627", "dtype": "float32"}, {"name": "628", "dtype": "float32"}, {"name": "629", "dtype": "float32"}, {"name": "630", "dtype": "float32"}, {"name": "631", "dtype": "float32"}, {"name": "632", "dtype": "float32"}, {"name": "633", "dtype": "float32"}, {"name": "634", "dtype": "float32"}, {"name": "635", "dtype": "float32"}, {"name": "636", "dtype": "float32"}, {"name": "637", "dtype": "float32"}, {"name": "638", "dtype": "float32"}, {"name": "639", "dtype": "float32"}, {"name": "640", "dtype": "float32"}, {"name": "641", "dtype": "float32"}, {"name": "642", "dtype": "float32"}, {"name": "643", "dtype": "float32"}, {"name": "644", "dtype": "float32"}, {"name": "645", "dtype": "float32"}, {"name": "646", "dtype": "float32"}, {"name": "647", "dtype": "float32"}, {"name": "648", "dtype": "float32"}, {"name": "649", "dtype": "float32"}, {"name": "650", "dtype": "float32"}, {"name": "651", "dtype": "float32"}, {"name": "652", "dtype": "float32"}, {"name": "653", "dtype": "float32"}, {"name": "654", "dtype": "float32"}, {"name": "655", "dtype": "float32"}, {"name": "656", "dtype": "float32"}, {"name": "657", "dtype": "float32"}, {"name": "658", "dtype": "float32"}, {"name": "659", "dtype": "float32"}, {"name": "660", "dtype": "float32"}, {"name": "661", "dtype": "float32"}, {"name": "662", "dtype": "float32"}, {"name": "663", "dtype": "float32"}, {"name": "664", "dtype": "float32"}, {"name": "665", "dtype": "float32"}, {"name": "666", "dtype": "float32"}, {"name": "667", "dtype": "float32"}, {"name": "668", "dtype": "float32"}, {"name": "669", "dtype": "float32"}, {"name": 
"670", "dtype": "float32"}, {"name": "671", "dtype": "float32"}, {"name": "672", "dtype": "float32"}, {"name": "673", "dtype": "float32"}, {"name": "674", "dtype": "float32"}, {"name": "675", "dtype": "float32"}, {"name": "676", "dtype": "float32"}, {"name": "677", "dtype": "float32"}, {"name": "678", "dtype": "float32"}, {"name": "679", "dtype": "float32"}, {"name": "680", "dtype": "float32"}, {"name": "681", "dtype": "float32"}, {"name": "682", "dtype": "float32"}, {"name": "683", "dtype": "float32"}, {"name": "684", "dtype": "float32"}, {"name": "685", "dtype": "float32"}, {"name": "686", "dtype": "float32"}, {"name": "687", "dtype": "float32"}, {"name": "688", "dtype": "float32"}, {"name": "689", "dtype": "float32"}, {"name": "690", "dtype": "float32"}, {"name": "691", "dtype": "float32"}, {"name": "692", "dtype": "float32"}, {"name": "693", "dtype": "float32"}, {"name": "694", "dtype": "float32"}, {"name": "695", "dtype": "float32"}, {"name": "696", "dtype": "float32"}, {"name": "697", "dtype": "float32"}, {"name": "698", "dtype": "float32"}, {"name": "699", "dtype": "float32"}, {"name": "700", "dtype": "float32"}, {"name": "701", "dtype": "float32"}, {"name": "702", "dtype": "float32"}, {"name": "703", "dtype": "float32"}, {"name": "704", "dtype": "float32"}, {"name": "705", "dtype": "float32"}, {"name": "706", "dtype": "float32"}, {"name": "707", "dtype": "float32"}, {"name": "708", "dtype": "float32"}, {"name": "709", "dtype": "float32"}, {"name": "710", "dtype": "float32"}, {"name": "711", "dtype": "float32"}, {"name": "712", "dtype": "float32"}, {"name": "713", "dtype": "float32"}, {"name": "714", "dtype": "float32"}, {"name": "715", "dtype": "float32"}, {"name": "716", "dtype": "float32"}, {"name": "717", "dtype": "float32"}, {"name": "718", "dtype": "float32"}, {"name": "719", "dtype": "float32"}, {"name": "720", "dtype": "float32"}, {"name": "721", "dtype": "float32"}, {"name": "722", "dtype": "float32"}, {"name": "723", "dtype": "float32"}, {"name": "724", "dtype": "float32"}, {"name": "725", "dtype": "float32"}, {"name": "726", "dtype": "float32"}, {"name": "727", "dtype": "float32"}, {"name": "728", "dtype": "float32"}, {"name": "729", "dtype": "float32"}, {"name": "730", "dtype": "float32"}, {"name": "731", "dtype": "float32"}, {"name": "732", "dtype": "float32"}, {"name": "733", "dtype": "float32"}, {"name": "734", "dtype": "float32"}, {"name": "735", "dtype": "float32"}, {"name": "736", "dtype": "float32"}, {"name": "737", "dtype": "float32"}, {"name": "738", "dtype": "float32"}, {"name": "739", "dtype": "float32"}, {"name": "740", "dtype": "float32"}, {"name": "741", "dtype": "float32"}, {"name": "742", "dtype": "float32"}, {"name": "743", "dtype": "float32"}, {"name": "744", "dtype": "float32"}, {"name": "745", "dtype": "float32"}, {"name": "746", "dtype": "float32"}, {"name": "747", "dtype": "float32"}, {"name": "748", "dtype": "float32"}, {"name": "749", "dtype": "float32"}, {"name": "750", "dtype": "float32"}, {"name": "751", "dtype": "float32"}, {"name": "752", "dtype": "float32"}, {"name": "753", "dtype": "float32"}, {"name": "754", "dtype": "float32"}, {"name": "755", "dtype": "float32"}, {"name": "756", "dtype": "float32"}, {"name": "757", "dtype": "float32"}, {"name": "758", "dtype": "float32"}, {"name": "759", "dtype": "float32"}, {"name": "760", "dtype": "float32"}, {"name": "761", "dtype": "float32"}, {"name": "762", "dtype": "float32"}, {"name": "763", "dtype": "float32"}, {"name": "764", "dtype": "float32"}, {"name": "765", "dtype": "float32"}, {"name": 
"766", "dtype": "float32"}, {"name": "767", "dtype": "float32"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 115582709.0625, "num_examples": 37500}, {"name": "test", "num_bytes": 38527570.0, "num_examples": 12500}], "download_size": 211882718, "dataset_size": 154110279.0625}}
2023-08-23T05:17:21+00:00
[]
[]
TAGS #region-us
# Dataset Card for "BGL_DistilRoBERTa_Finetuned" More Information needed
[ "# Dataset Card for \"BGL_DistilRoBERTa_Finetuned\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"BGL_DistilRoBERTa_Finetuned\"\n\nMore Information needed" ]
[ 6, 23 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"BGL_DistilRoBERTa_Finetuned\"\n\nMore Information needed" ]
75d3e6701e97961bf9761d67dd60be9906ca2a6a
# Dataset of niwatari_kutaka (Touhou)

This is the dataset of niwatari_kutaka (Touhou), containing 500 images and their tags.

The core tags of this character are `blonde_hair, short_hair, multicolored_hair, two-tone_hair, red_hair, wings, on_head, bird_wings, animal_on_head, feathered_wings, red_eyes, yellow_wings, tail, bird_tail`, which are pruned in this dataset.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)).

## List of Packages

| Name | Images | Size | Download | Type | Description |
|:-----------------|---------:|:-----------|:---------|:-----------|:----------------------------------------------------------------------|
| raw | 500 | 582.59 MiB | [Download](https://huggingface.co/datasets/CyberHarem/niwatari_kutaka_touhou/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). |
| 800 | 500 | 347.58 MiB | [Download](https://huggingface.co/datasets/CyberHarem/niwatari_kutaka_touhou/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. |
| stage3-p480-800 | 1144 | 722.53 MiB | [Download](https://huggingface.co/datasets/CyberHarem/niwatari_kutaka_touhou/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
| 1200 | 500 | 519.01 MiB | [Download](https://huggingface.co/datasets/CyberHarem/niwatari_kutaka_touhou/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. |
| stage3-p480-1200 | 1144 | 975.60 MiB | [Download](https://huggingface.co/datasets/CyberHarem/niwatari_kutaka_touhou/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |

### Load Raw Dataset with Waifuc

We provide the raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code:

```python
import os
import zipfile

from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource

# download raw archive file
zip_file = hf_hub_download(
    repo_id='CyberHarem/niwatari_kutaka_touhou',
    repo_type='dataset',
    filename='dataset-raw.zip',
)

# extract files to your directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)

# load the dataset with waifuc
source = LocalSource(dataset_dir)
for item in source:
    print(item.image, item.meta['filename'], item.meta['tags'])
```

## List of Clusters

List of tag clustering results; maybe some outfits can be mined here.
### Raw Text Version

| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags |
|----:|----------:|:---|:---|:---|:---|:---|:------------------------------------------------------------------------------------------------|
| 0 | 8 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, bird_on_head, chick, looking_at_viewer, open_mouth, orange_dress, simple_background, solo, white_shirt, blush, white_background, tail_feathers, puffy_short_sleeves, animal, smile |
| 1 | 15 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, chick, orange_dress, puffy_short_sleeves, solo, white_shirt, looking_at_viewer, bird_on_head, open_mouth, boots, brown_footwear, tail_feathers, blush, animal, brown_dress, simple_background, white_background, :d |
| 2 | 5 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, boots, chick, closed_mouth, full_body, orange_dress, puffy_short_sleeves, simple_background, solo, tail_feathers, white_background, white_shirt, bird_on_head, blush, brown_footwear, looking_at_viewer, smile, bangs, breasts |
| 3 | 10 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, chick, dress, looking_at_viewer, simple_background, solo, upper_body, white_shirt, bird_on_head, white_background, closed_mouth, smile, animal, blush, puffy_short_sleeves |
| 4 | 5 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1girl, :d, ahoge, blush_stickers, chibi, chick, open_mouth, skirt, solo, white_shirt, brown_background, orange_dress, puffy_short_sleeves, animal, barefoot, bird_on_head, full_body, hair_between_eyes, simple_background, standing, bangs |

### Table Version

| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | bird_on_head | chick | looking_at_viewer | open_mouth | orange_dress | simple_background | solo | white_shirt | blush | white_background | tail_feathers | puffy_short_sleeves | animal | smile | boots | brown_footwear | brown_dress | :d | closed_mouth | full_body | bangs | breasts | dress | upper_body | ahoge | blush_stickers | chibi | skirt | brown_background | barefoot | hair_between_eyes | standing |
|----:|----------:|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|:---|
| 0 | 8 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X |  |  |  |  |  |  |  |  |  |  |  |  |  |  |  |  |  |  |
| 1 | 15 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | X | X | X | X | X | X | X | X | X | X | X |  | X | X | X | X |  |  |  |  |  |  |  |  |  |  |  |  |  |  |
| 2 | 5 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | X | X | X |  | X | X | X | X | X | X | X | X |  | X | X | X |  |  | X | X | X | X |  |  |  |  |  |  |  |  |  |  |
| 3 | 10 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | X | X | X |  |  | X | X | X | X | X |  | X | X | X |  |  |  |  | X |  |  |  | X | X |  |  |  |  |  |  |  |  |
| 4 | 5 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | X | X |  | X | X | X | X | X |  |  |  | X | X |  |  |  |  | X |  | X | X |  |  |  | X | X | X | X | X | X | X | X |
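The clusters above can also be mined locally from the extracted raw package. A small sketch building on the waifuc snippet earlier (it assumes `item.meta['tags']` exposes tag names as keys, as in the raw export's meta information):

```python
from waifuc.source import LocalSource

# Walk the extracted raw dataset and keep items tagged 'orange_dress',
# one of the recurring tags in the clusters above.
source = LocalSource('dataset_dir')
for item in source:
    tags = item.meta.get('tags', {})
    if 'orange_dress' in tags:
        print(item.meta['filename'], sorted(tags)[:10])
```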
CyberHarem/niwatari_kutaka_touhou
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-18T15:19:53+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-15T09:02:09+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of niwatari\_kutaka (Touhou) ==================================== This is the dataset of niwatari\_kutaka (Touhou), containing 500 images and their tags. The core tags of this character are 'blonde\_hair, short\_hair, multicolored\_hair, two-tone\_hair, red\_hair, wings, on\_head, bird\_wings, animal\_on\_head, feathered\_wings, red\_eyes, yellow\_wings, tail, bird\_tail', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by DeepGHS Team (huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide the raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code. List of Clusters ---------------- List of tag clustering results; maybe some outfits can be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ 44, 61, 5, 4 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.### Raw Text Version### Table Version" ]
8d78f45a6bedbe260f783c88aa5a539b4a670117
## describe

Very naive scene dialogues, but real-world scene information may appear, e.g. XX hospital, XX doctor.

If you require high quality for pure-instruction data, it needs further cleaning; for finetuning on health scenarios only, it is maybe enough.

## from

[[Medical-Dialogue-System]](https://github.com/UCSD-AI4H/Medical-Dialogue-System)

*[[medical_dialog]](https://huggingface.co/datasets/medical_dialog)

## format

```json
{
    "instruction": null,
    "input": "不知道,我是在09年8月份,白天出了很多的汗,晚上睡觉突然醒来,看房子天晕地转,过了大约也就一分钟的样子,就不转了.但头向左转动就又转,左边头皮还发麻.第二天起来,人没有精神,过了段时间.病情时轻时重,好像是躺在床上向右人就一上晕了.但时间不长.有一天开了一天的车,晚上先是有点头晕,走路不稳,上床休息,但突然后脑根部特别疼,到了第二天也不疼了.到现在也没有疼过.现在就是躺下和起床特别晕(头向右和头向上或向下),走路不稳.特别是站久了,就要倒了感觉.另外平常,脑袋感觉昏沉沉的,有时眼睛看东西跟不上速度,要晕的,晕的时候是脑袋里跟一片去飘过的。",
    "output": "你得的是颈椎间盘突出,可以先做保守治疗。",
    "history": [
        [
            "但,很多医生看了片子,说是张口位片枢椎似乎有些旋转移位 ,不知有没有啊。",
            "枢椎旋转移位不太可能,你的片子不是很清楚。请咨询我院骨科。"
        ],
        [
            "好的,谢谢大夫,祝您新春愉快。",
            "不客气!"
        ]
    ]
}
```

## usage

```python
from datasets import load_dataset

ds = load_dataset("ticoAg/Medical-Dialogue-System")
```

## process script

```python
import json
from pathlib import Path

from tqdm import tqdm

# Minimal stand-ins for the author's helpers (assumed behaviour):
def loadJS(path): return json.loads(Path(path).read_text(encoding="utf-8"))
def sft_meta(instruction=None, input=None, output=None): return {"instruction": instruction, "input": input, "output": output}
def sortDict(d): return dict(sorted(d.items()))

data_dir = Path("medical_dialog/data/processed-chinese")
raw_train_ds = loadJS(data_dir.joinpath("train_data.json"))
raw_test_ds = loadJS(data_dir.joinpath("test_data.json"))
raw_valid_ds = loadJS(data_dir.joinpath("validate_data.json"))
raw_ds = raw_train_ds + raw_test_ds + raw_valid_ds

_ds = []
for i in tqdm(raw_ds):
    # each utterance is "<2-char role>:<content>", e.g. "病人:..." / "医生:..."
    _diag = [{"role": dialog[:2], "content": dialog[3:]} for dialog in i]
    if len(_diag[1]['content']) <= 6: continue  # filter out overly short single replies
    meta_data = sft_meta(input=_diag[0]['content'], output=_diag[1]['content'])
    if len(_diag) > 2:
        meta_data['history'] = [[_diag[2*idx]['content'], _diag[2*idx+1]['content']] for idx in range(len(_diag)//2)][1:]
    meta_data = sortDict(meta_data)
    _ds += [meta_data]
```
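For chat-style finetuning it can help to flatten a record into alternating turns; per the process script, `history` holds the exchanges that follow the first `input`/`output` pair. A minimal sketch (field names come from the format above; the `user`/`assistant` role labels are an assumption, not part of the dataset):

```python
def to_messages(record):
    """Flatten one record into a chat-style message list."""
    # The first exchange is stored in input/output ...
    messages = [
        {"role": "user", "content": record["input"]},
        {"role": "assistant", "content": record["output"]},
    ]
    # ... and the later [patient, doctor] turns live in history.
    for user_turn, assistant_turn in record.get("history") or []:
        messages.append({"role": "user", "content": user_turn})
        messages.append({"role": "assistant", "content": assistant_turn})
    return messages
```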
ticoAg/Medical-Dialogue-System
[ "task_categories:question-answering", "task_categories:text-generation", "size_categories:1M<n<10M", "language:zh", "license:apache-2.0", "region:us" ]
2023-08-18T15:22:43+00:00
{"language": ["zh"], "license": "apache-2.0", "size_categories": ["1M<n<10M"], "task_categories": ["question-answering", "text-generation"], "num rows": 3206606, "file size": "2.09 GB"}
2023-08-19T09:57:30+00:00
[]
[ "zh" ]
TAGS #task_categories-question-answering #task_categories-text-generation #size_categories-1M<n<10M #language-Chinese #license-apache-2.0 #region-us
## describe Very naive scene dialogues, but real-world scene information may appear, e.g. XX hospital, XX doctor. If you require high quality for pure-instruction data, it needs further cleaning; for finetuning on health scenarios only, it is maybe enough. ## from [[Medical-Dialogue-System]](URL *[[medical_dialog]](URL ## format ## usage ## process script
[ "## describe\n\n非常navie的场景对话,但可能出现真实场景信息,比如XX医院,XX医生\n\n对纯指令数据质量要求较高的需要进一步清洗,只用来健康场景finetune maybe enough", "## from\n[[Medical-Dialogue-System]](URL\n\n*[[medical_dialog]](URL", "## format", "## usage", "## process script" ]
[ "TAGS\n#task_categories-question-answering #task_categories-text-generation #size_categories-1M<n<10M #language-Chinese #license-apache-2.0 #region-us \n", "## describe\n\n非常navie的场景对话,但可能出现真实场景信息,比如XX医院,XX医生\n\n对纯指令数据质量要求较高的需要进一步清洗,只用来健康场景finetune maybe enough", "## from\n[[Medical-Dialogue-System]](URL\n\n*[[medical_dialog]](URL", "## format", "## usage", "## process script" ]
[ 54, 44, 25, 2, 2, 3 ]
[ "passage: TAGS\n#task_categories-question-answering #task_categories-text-generation #size_categories-1M<n<10M #language-Chinese #license-apache-2.0 #region-us \n## describe\n\n非常navie的场景对话,但可能出现真实场景信息,比如XX医院,XX医生\n\n对纯指令数据质量要求较高的需要进一步清洗,只用来健康场景finetune maybe enough## from\n[[Medical-Dialogue-System]](URL\n\n*[[medical_dialog]](URL## format## usage## process script" ]
d9a4186038c87bb9b6b2a34e859335fa5994b94d
# Dataset Card for "BGL_GPT2_Finetuned" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
EgilKarlsen/BGL_GPT2_Finetuned
[ "region:us" ]
2023-08-18T15:25:15+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "0", "dtype": "float32"}, {"name": "1", "dtype": "float32"}, {"name": "2", "dtype": "float32"}, {"name": "3", "dtype": "float32"}, {"name": "4", "dtype": "float32"}, {"name": "5", "dtype": "float32"}, {"name": "6", "dtype": "float32"}, {"name": "7", "dtype": "float32"}, {"name": "8", "dtype": "float32"}, {"name": "9", "dtype": "float32"}, {"name": "10", "dtype": "float32"}, {"name": "11", "dtype": "float32"}, {"name": "12", "dtype": "float32"}, {"name": "13", "dtype": "float32"}, {"name": "14", "dtype": "float32"}, {"name": "15", "dtype": "float32"}, {"name": "16", "dtype": "float32"}, {"name": "17", "dtype": "float32"}, {"name": "18", "dtype": "float32"}, {"name": "19", "dtype": "float32"}, {"name": "20", "dtype": "float32"}, {"name": "21", "dtype": "float32"}, {"name": "22", "dtype": "float32"}, {"name": "23", "dtype": "float32"}, {"name": "24", "dtype": "float32"}, {"name": "25", "dtype": "float32"}, {"name": "26", "dtype": "float32"}, {"name": "27", "dtype": "float32"}, {"name": "28", "dtype": "float32"}, {"name": "29", "dtype": "float32"}, {"name": "30", "dtype": "float32"}, {"name": "31", "dtype": "float32"}, {"name": "32", "dtype": "float32"}, {"name": "33", "dtype": "float32"}, {"name": "34", "dtype": "float32"}, {"name": "35", "dtype": "float32"}, {"name": "36", "dtype": "float32"}, {"name": "37", "dtype": "float32"}, {"name": "38", "dtype": "float32"}, {"name": "39", "dtype": "float32"}, {"name": "40", "dtype": "float32"}, {"name": "41", "dtype": "float32"}, {"name": "42", "dtype": "float32"}, {"name": "43", "dtype": "float32"}, {"name": "44", "dtype": "float32"}, {"name": "45", "dtype": "float32"}, {"name": "46", "dtype": "float32"}, {"name": "47", "dtype": "float32"}, {"name": "48", "dtype": "float32"}, {"name": "49", "dtype": "float32"}, {"name": "50", "dtype": "float32"}, {"name": "51", "dtype": "float32"}, {"name": "52", "dtype": "float32"}, {"name": "53", "dtype": "float32"}, {"name": "54", "dtype": "float32"}, {"name": "55", "dtype": "float32"}, {"name": "56", "dtype": "float32"}, {"name": "57", "dtype": "float32"}, {"name": "58", "dtype": "float32"}, {"name": "59", "dtype": "float32"}, {"name": "60", "dtype": "float32"}, {"name": "61", "dtype": "float32"}, {"name": "62", "dtype": "float32"}, {"name": "63", "dtype": "float32"}, {"name": "64", "dtype": "float32"}, {"name": "65", "dtype": "float32"}, {"name": "66", "dtype": "float32"}, {"name": "67", "dtype": "float32"}, {"name": "68", "dtype": "float32"}, {"name": "69", "dtype": "float32"}, {"name": "70", "dtype": "float32"}, {"name": "71", "dtype": "float32"}, {"name": "72", "dtype": "float32"}, {"name": "73", "dtype": "float32"}, {"name": "74", "dtype": "float32"}, {"name": "75", "dtype": "float32"}, {"name": "76", "dtype": "float32"}, {"name": "77", "dtype": "float32"}, {"name": "78", "dtype": "float32"}, {"name": "79", "dtype": "float32"}, {"name": "80", "dtype": "float32"}, {"name": "81", "dtype": "float32"}, {"name": "82", "dtype": "float32"}, {"name": "83", "dtype": "float32"}, {"name": "84", "dtype": "float32"}, {"name": "85", "dtype": "float32"}, {"name": "86", "dtype": "float32"}, {"name": "87", "dtype": "float32"}, {"name": "88", "dtype": "float32"}, {"name": "89", "dtype": "float32"}, {"name": "90", "dtype": "float32"}, {"name": "91", "dtype": "float32"}, {"name": "92", "dtype": "float32"}, {"name": "93", "dtype": "float32"}, 
{"name": "94", "dtype": "float32"}, {"name": "95", "dtype": "float32"}, {"name": "96", "dtype": "float32"}, {"name": "97", "dtype": "float32"}, {"name": "98", "dtype": "float32"}, {"name": "99", "dtype": "float32"}, {"name": "100", "dtype": "float32"}, {"name": "101", "dtype": "float32"}, {"name": "102", "dtype": "float32"}, {"name": "103", "dtype": "float32"}, {"name": "104", "dtype": "float32"}, {"name": "105", "dtype": "float32"}, {"name": "106", "dtype": "float32"}, {"name": "107", "dtype": "float32"}, {"name": "108", "dtype": "float32"}, {"name": "109", "dtype": "float32"}, {"name": "110", "dtype": "float32"}, {"name": "111", "dtype": "float32"}, {"name": "112", "dtype": "float32"}, {"name": "113", "dtype": "float32"}, {"name": "114", "dtype": "float32"}, {"name": "115", "dtype": "float32"}, {"name": "116", "dtype": "float32"}, {"name": "117", "dtype": "float32"}, {"name": "118", "dtype": "float32"}, {"name": "119", "dtype": "float32"}, {"name": "120", "dtype": "float32"}, {"name": "121", "dtype": "float32"}, {"name": "122", "dtype": "float32"}, {"name": "123", "dtype": "float32"}, {"name": "124", "dtype": "float32"}, {"name": "125", "dtype": "float32"}, {"name": "126", "dtype": "float32"}, {"name": "127", "dtype": "float32"}, {"name": "128", "dtype": "float32"}, {"name": "129", "dtype": "float32"}, {"name": "130", "dtype": "float32"}, {"name": "131", "dtype": "float32"}, {"name": "132", "dtype": "float32"}, {"name": "133", "dtype": "float32"}, {"name": "134", "dtype": "float32"}, {"name": "135", "dtype": "float32"}, {"name": "136", "dtype": "float32"}, {"name": "137", "dtype": "float32"}, {"name": "138", "dtype": "float32"}, {"name": "139", "dtype": "float32"}, {"name": "140", "dtype": "float32"}, {"name": "141", "dtype": "float32"}, {"name": "142", "dtype": "float32"}, {"name": "143", "dtype": "float32"}, {"name": "144", "dtype": "float32"}, {"name": "145", "dtype": "float32"}, {"name": "146", "dtype": "float32"}, {"name": "147", "dtype": "float32"}, {"name": "148", "dtype": "float32"}, {"name": "149", "dtype": "float32"}, {"name": "150", "dtype": "float32"}, {"name": "151", "dtype": "float32"}, {"name": "152", "dtype": "float32"}, {"name": "153", "dtype": "float32"}, {"name": "154", "dtype": "float32"}, {"name": "155", "dtype": "float32"}, {"name": "156", "dtype": "float32"}, {"name": "157", "dtype": "float32"}, {"name": "158", "dtype": "float32"}, {"name": "159", "dtype": "float32"}, {"name": "160", "dtype": "float32"}, {"name": "161", "dtype": "float32"}, {"name": "162", "dtype": "float32"}, {"name": "163", "dtype": "float32"}, {"name": "164", "dtype": "float32"}, {"name": "165", "dtype": "float32"}, {"name": "166", "dtype": "float32"}, {"name": "167", "dtype": "float32"}, {"name": "168", "dtype": "float32"}, {"name": "169", "dtype": "float32"}, {"name": "170", "dtype": "float32"}, {"name": "171", "dtype": "float32"}, {"name": "172", "dtype": "float32"}, {"name": "173", "dtype": "float32"}, {"name": "174", "dtype": "float32"}, {"name": "175", "dtype": "float32"}, {"name": "176", "dtype": "float32"}, {"name": "177", "dtype": "float32"}, {"name": "178", "dtype": "float32"}, {"name": "179", "dtype": "float32"}, {"name": "180", "dtype": "float32"}, {"name": "181", "dtype": "float32"}, {"name": "182", "dtype": "float32"}, {"name": "183", "dtype": "float32"}, {"name": "184", "dtype": "float32"}, {"name": "185", "dtype": "float32"}, {"name": "186", "dtype": "float32"}, {"name": "187", "dtype": "float32"}, {"name": "188", "dtype": "float32"}, {"name": "189", "dtype": "float32"}, {"name": 
"190", "dtype": "float32"}, {"name": "191", "dtype": "float32"}, {"name": "192", "dtype": "float32"}, {"name": "193", "dtype": "float32"}, {"name": "194", "dtype": "float32"}, {"name": "195", "dtype": "float32"}, {"name": "196", "dtype": "float32"}, {"name": "197", "dtype": "float32"}, {"name": "198", "dtype": "float32"}, {"name": "199", "dtype": "float32"}, {"name": "200", "dtype": "float32"}, {"name": "201", "dtype": "float32"}, {"name": "202", "dtype": "float32"}, {"name": "203", "dtype": "float32"}, {"name": "204", "dtype": "float32"}, {"name": "205", "dtype": "float32"}, {"name": "206", "dtype": "float32"}, {"name": "207", "dtype": "float32"}, {"name": "208", "dtype": "float32"}, {"name": "209", "dtype": "float32"}, {"name": "210", "dtype": "float32"}, {"name": "211", "dtype": "float32"}, {"name": "212", "dtype": "float32"}, {"name": "213", "dtype": "float32"}, {"name": "214", "dtype": "float32"}, {"name": "215", "dtype": "float32"}, {"name": "216", "dtype": "float32"}, {"name": "217", "dtype": "float32"}, {"name": "218", "dtype": "float32"}, {"name": "219", "dtype": "float32"}, {"name": "220", "dtype": "float32"}, {"name": "221", "dtype": "float32"}, {"name": "222", "dtype": "float32"}, {"name": "223", "dtype": "float32"}, {"name": "224", "dtype": "float32"}, {"name": "225", "dtype": "float32"}, {"name": "226", "dtype": "float32"}, {"name": "227", "dtype": "float32"}, {"name": "228", "dtype": "float32"}, {"name": "229", "dtype": "float32"}, {"name": "230", "dtype": "float32"}, {"name": "231", "dtype": "float32"}, {"name": "232", "dtype": "float32"}, {"name": "233", "dtype": "float32"}, {"name": "234", "dtype": "float32"}, {"name": "235", "dtype": "float32"}, {"name": "236", "dtype": "float32"}, {"name": "237", "dtype": "float32"}, {"name": "238", "dtype": "float32"}, {"name": "239", "dtype": "float32"}, {"name": "240", "dtype": "float32"}, {"name": "241", "dtype": "float32"}, {"name": "242", "dtype": "float32"}, {"name": "243", "dtype": "float32"}, {"name": "244", "dtype": "float32"}, {"name": "245", "dtype": "float32"}, {"name": "246", "dtype": "float32"}, {"name": "247", "dtype": "float32"}, {"name": "248", "dtype": "float32"}, {"name": "249", "dtype": "float32"}, {"name": "250", "dtype": "float32"}, {"name": "251", "dtype": "float32"}, {"name": "252", "dtype": "float32"}, {"name": "253", "dtype": "float32"}, {"name": "254", "dtype": "float32"}, {"name": "255", "dtype": "float32"}, {"name": "256", "dtype": "float32"}, {"name": "257", "dtype": "float32"}, {"name": "258", "dtype": "float32"}, {"name": "259", "dtype": "float32"}, {"name": "260", "dtype": "float32"}, {"name": "261", "dtype": "float32"}, {"name": "262", "dtype": "float32"}, {"name": "263", "dtype": "float32"}, {"name": "264", "dtype": "float32"}, {"name": "265", "dtype": "float32"}, {"name": "266", "dtype": "float32"}, {"name": "267", "dtype": "float32"}, {"name": "268", "dtype": "float32"}, {"name": "269", "dtype": "float32"}, {"name": "270", "dtype": "float32"}, {"name": "271", "dtype": "float32"}, {"name": "272", "dtype": "float32"}, {"name": "273", "dtype": "float32"}, {"name": "274", "dtype": "float32"}, {"name": "275", "dtype": "float32"}, {"name": "276", "dtype": "float32"}, {"name": "277", "dtype": "float32"}, {"name": "278", "dtype": "float32"}, {"name": "279", "dtype": "float32"}, {"name": "280", "dtype": "float32"}, {"name": "281", "dtype": "float32"}, {"name": "282", "dtype": "float32"}, {"name": "283", "dtype": "float32"}, {"name": "284", "dtype": "float32"}, {"name": "285", "dtype": "float32"}, {"name": 
"286", "dtype": "float32"}, {"name": "287", "dtype": "float32"}, {"name": "288", "dtype": "float32"}, {"name": "289", "dtype": "float32"}, {"name": "290", "dtype": "float32"}, {"name": "291", "dtype": "float32"}, {"name": "292", "dtype": "float32"}, {"name": "293", "dtype": "float32"}, {"name": "294", "dtype": "float32"}, {"name": "295", "dtype": "float32"}, {"name": "296", "dtype": "float32"}, {"name": "297", "dtype": "float32"}, {"name": "298", "dtype": "float32"}, {"name": "299", "dtype": "float32"}, {"name": "300", "dtype": "float32"}, {"name": "301", "dtype": "float32"}, {"name": "302", "dtype": "float32"}, {"name": "303", "dtype": "float32"}, {"name": "304", "dtype": "float32"}, {"name": "305", "dtype": "float32"}, {"name": "306", "dtype": "float32"}, {"name": "307", "dtype": "float32"}, {"name": "308", "dtype": "float32"}, {"name": "309", "dtype": "float32"}, {"name": "310", "dtype": "float32"}, {"name": "311", "dtype": "float32"}, {"name": "312", "dtype": "float32"}, {"name": "313", "dtype": "float32"}, {"name": "314", "dtype": "float32"}, {"name": "315", "dtype": "float32"}, {"name": "316", "dtype": "float32"}, {"name": "317", "dtype": "float32"}, {"name": "318", "dtype": "float32"}, {"name": "319", "dtype": "float32"}, {"name": "320", "dtype": "float32"}, {"name": "321", "dtype": "float32"}, {"name": "322", "dtype": "float32"}, {"name": "323", "dtype": "float32"}, {"name": "324", "dtype": "float32"}, {"name": "325", "dtype": "float32"}, {"name": "326", "dtype": "float32"}, {"name": "327", "dtype": "float32"}, {"name": "328", "dtype": "float32"}, {"name": "329", "dtype": "float32"}, {"name": "330", "dtype": "float32"}, {"name": "331", "dtype": "float32"}, {"name": "332", "dtype": "float32"}, {"name": "333", "dtype": "float32"}, {"name": "334", "dtype": "float32"}, {"name": "335", "dtype": "float32"}, {"name": "336", "dtype": "float32"}, {"name": "337", "dtype": "float32"}, {"name": "338", "dtype": "float32"}, {"name": "339", "dtype": "float32"}, {"name": "340", "dtype": "float32"}, {"name": "341", "dtype": "float32"}, {"name": "342", "dtype": "float32"}, {"name": "343", "dtype": "float32"}, {"name": "344", "dtype": "float32"}, {"name": "345", "dtype": "float32"}, {"name": "346", "dtype": "float32"}, {"name": "347", "dtype": "float32"}, {"name": "348", "dtype": "float32"}, {"name": "349", "dtype": "float32"}, {"name": "350", "dtype": "float32"}, {"name": "351", "dtype": "float32"}, {"name": "352", "dtype": "float32"}, {"name": "353", "dtype": "float32"}, {"name": "354", "dtype": "float32"}, {"name": "355", "dtype": "float32"}, {"name": "356", "dtype": "float32"}, {"name": "357", "dtype": "float32"}, {"name": "358", "dtype": "float32"}, {"name": "359", "dtype": "float32"}, {"name": "360", "dtype": "float32"}, {"name": "361", "dtype": "float32"}, {"name": "362", "dtype": "float32"}, {"name": "363", "dtype": "float32"}, {"name": "364", "dtype": "float32"}, {"name": "365", "dtype": "float32"}, {"name": "366", "dtype": "float32"}, {"name": "367", "dtype": "float32"}, {"name": "368", "dtype": "float32"}, {"name": "369", "dtype": "float32"}, {"name": "370", "dtype": "float32"}, {"name": "371", "dtype": "float32"}, {"name": "372", "dtype": "float32"}, {"name": "373", "dtype": "float32"}, {"name": "374", "dtype": "float32"}, {"name": "375", "dtype": "float32"}, {"name": "376", "dtype": "float32"}, {"name": "377", "dtype": "float32"}, {"name": "378", "dtype": "float32"}, {"name": "379", "dtype": "float32"}, {"name": "380", "dtype": "float32"}, {"name": "381", "dtype": "float32"}, {"name": 
"382", "dtype": "float32"}, {"name": "383", "dtype": "float32"}, {"name": "384", "dtype": "float32"}, {"name": "385", "dtype": "float32"}, {"name": "386", "dtype": "float32"}, {"name": "387", "dtype": "float32"}, {"name": "388", "dtype": "float32"}, {"name": "389", "dtype": "float32"}, {"name": "390", "dtype": "float32"}, {"name": "391", "dtype": "float32"}, {"name": "392", "dtype": "float32"}, {"name": "393", "dtype": "float32"}, {"name": "394", "dtype": "float32"}, {"name": "395", "dtype": "float32"}, {"name": "396", "dtype": "float32"}, {"name": "397", "dtype": "float32"}, {"name": "398", "dtype": "float32"}, {"name": "399", "dtype": "float32"}, {"name": "400", "dtype": "float32"}, {"name": "401", "dtype": "float32"}, {"name": "402", "dtype": "float32"}, {"name": "403", "dtype": "float32"}, {"name": "404", "dtype": "float32"}, {"name": "405", "dtype": "float32"}, {"name": "406", "dtype": "float32"}, {"name": "407", "dtype": "float32"}, {"name": "408", "dtype": "float32"}, {"name": "409", "dtype": "float32"}, {"name": "410", "dtype": "float32"}, {"name": "411", "dtype": "float32"}, {"name": "412", "dtype": "float32"}, {"name": "413", "dtype": "float32"}, {"name": "414", "dtype": "float32"}, {"name": "415", "dtype": "float32"}, {"name": "416", "dtype": "float32"}, {"name": "417", "dtype": "float32"}, {"name": "418", "dtype": "float32"}, {"name": "419", "dtype": "float32"}, {"name": "420", "dtype": "float32"}, {"name": "421", "dtype": "float32"}, {"name": "422", "dtype": "float32"}, {"name": "423", "dtype": "float32"}, {"name": "424", "dtype": "float32"}, {"name": "425", "dtype": "float32"}, {"name": "426", "dtype": "float32"}, {"name": "427", "dtype": "float32"}, {"name": "428", "dtype": "float32"}, {"name": "429", "dtype": "float32"}, {"name": "430", "dtype": "float32"}, {"name": "431", "dtype": "float32"}, {"name": "432", "dtype": "float32"}, {"name": "433", "dtype": "float32"}, {"name": "434", "dtype": "float32"}, {"name": "435", "dtype": "float32"}, {"name": "436", "dtype": "float32"}, {"name": "437", "dtype": "float32"}, {"name": "438", "dtype": "float32"}, {"name": "439", "dtype": "float32"}, {"name": "440", "dtype": "float32"}, {"name": "441", "dtype": "float32"}, {"name": "442", "dtype": "float32"}, {"name": "443", "dtype": "float32"}, {"name": "444", "dtype": "float32"}, {"name": "445", "dtype": "float32"}, {"name": "446", "dtype": "float32"}, {"name": "447", "dtype": "float32"}, {"name": "448", "dtype": "float32"}, {"name": "449", "dtype": "float32"}, {"name": "450", "dtype": "float32"}, {"name": "451", "dtype": "float32"}, {"name": "452", "dtype": "float32"}, {"name": "453", "dtype": "float32"}, {"name": "454", "dtype": "float32"}, {"name": "455", "dtype": "float32"}, {"name": "456", "dtype": "float32"}, {"name": "457", "dtype": "float32"}, {"name": "458", "dtype": "float32"}, {"name": "459", "dtype": "float32"}, {"name": "460", "dtype": "float32"}, {"name": "461", "dtype": "float32"}, {"name": "462", "dtype": "float32"}, {"name": "463", "dtype": "float32"}, {"name": "464", "dtype": "float32"}, {"name": "465", "dtype": "float32"}, {"name": "466", "dtype": "float32"}, {"name": "467", "dtype": "float32"}, {"name": "468", "dtype": "float32"}, {"name": "469", "dtype": "float32"}, {"name": "470", "dtype": "float32"}, {"name": "471", "dtype": "float32"}, {"name": "472", "dtype": "float32"}, {"name": "473", "dtype": "float32"}, {"name": "474", "dtype": "float32"}, {"name": "475", "dtype": "float32"}, {"name": "476", "dtype": "float32"}, {"name": "477", "dtype": "float32"}, {"name": 
"478", "dtype": "float32"}, {"name": "479", "dtype": "float32"}, {"name": "480", "dtype": "float32"}, {"name": "481", "dtype": "float32"}, {"name": "482", "dtype": "float32"}, {"name": "483", "dtype": "float32"}, {"name": "484", "dtype": "float32"}, {"name": "485", "dtype": "float32"}, {"name": "486", "dtype": "float32"}, {"name": "487", "dtype": "float32"}, {"name": "488", "dtype": "float32"}, {"name": "489", "dtype": "float32"}, {"name": "490", "dtype": "float32"}, {"name": "491", "dtype": "float32"}, {"name": "492", "dtype": "float32"}, {"name": "493", "dtype": "float32"}, {"name": "494", "dtype": "float32"}, {"name": "495", "dtype": "float32"}, {"name": "496", "dtype": "float32"}, {"name": "497", "dtype": "float32"}, {"name": "498", "dtype": "float32"}, {"name": "499", "dtype": "float32"}, {"name": "500", "dtype": "float32"}, {"name": "501", "dtype": "float32"}, {"name": "502", "dtype": "float32"}, {"name": "503", "dtype": "float32"}, {"name": "504", "dtype": "float32"}, {"name": "505", "dtype": "float32"}, {"name": "506", "dtype": "float32"}, {"name": "507", "dtype": "float32"}, {"name": "508", "dtype": "float32"}, {"name": "509", "dtype": "float32"}, {"name": "510", "dtype": "float32"}, {"name": "511", "dtype": "float32"}, {"name": "512", "dtype": "float32"}, {"name": "513", "dtype": "float32"}, {"name": "514", "dtype": "float32"}, {"name": "515", "dtype": "float32"}, {"name": "516", "dtype": "float32"}, {"name": "517", "dtype": "float32"}, {"name": "518", "dtype": "float32"}, {"name": "519", "dtype": "float32"}, {"name": "520", "dtype": "float32"}, {"name": "521", "dtype": "float32"}, {"name": "522", "dtype": "float32"}, {"name": "523", "dtype": "float32"}, {"name": "524", "dtype": "float32"}, {"name": "525", "dtype": "float32"}, {"name": "526", "dtype": "float32"}, {"name": "527", "dtype": "float32"}, {"name": "528", "dtype": "float32"}, {"name": "529", "dtype": "float32"}, {"name": "530", "dtype": "float32"}, {"name": "531", "dtype": "float32"}, {"name": "532", "dtype": "float32"}, {"name": "533", "dtype": "float32"}, {"name": "534", "dtype": "float32"}, {"name": "535", "dtype": "float32"}, {"name": "536", "dtype": "float32"}, {"name": "537", "dtype": "float32"}, {"name": "538", "dtype": "float32"}, {"name": "539", "dtype": "float32"}, {"name": "540", "dtype": "float32"}, {"name": "541", "dtype": "float32"}, {"name": "542", "dtype": "float32"}, {"name": "543", "dtype": "float32"}, {"name": "544", "dtype": "float32"}, {"name": "545", "dtype": "float32"}, {"name": "546", "dtype": "float32"}, {"name": "547", "dtype": "float32"}, {"name": "548", "dtype": "float32"}, {"name": "549", "dtype": "float32"}, {"name": "550", "dtype": "float32"}, {"name": "551", "dtype": "float32"}, {"name": "552", "dtype": "float32"}, {"name": "553", "dtype": "float32"}, {"name": "554", "dtype": "float32"}, {"name": "555", "dtype": "float32"}, {"name": "556", "dtype": "float32"}, {"name": "557", "dtype": "float32"}, {"name": "558", "dtype": "float32"}, {"name": "559", "dtype": "float32"}, {"name": "560", "dtype": "float32"}, {"name": "561", "dtype": "float32"}, {"name": "562", "dtype": "float32"}, {"name": "563", "dtype": "float32"}, {"name": "564", "dtype": "float32"}, {"name": "565", "dtype": "float32"}, {"name": "566", "dtype": "float32"}, {"name": "567", "dtype": "float32"}, {"name": "568", "dtype": "float32"}, {"name": "569", "dtype": "float32"}, {"name": "570", "dtype": "float32"}, {"name": "571", "dtype": "float32"}, {"name": "572", "dtype": "float32"}, {"name": "573", "dtype": "float32"}, {"name": 
"574", "dtype": "float32"}, {"name": "575", "dtype": "float32"}, {"name": "576", "dtype": "float32"}, {"name": "577", "dtype": "float32"}, {"name": "578", "dtype": "float32"}, {"name": "579", "dtype": "float32"}, {"name": "580", "dtype": "float32"}, {"name": "581", "dtype": "float32"}, {"name": "582", "dtype": "float32"}, {"name": "583", "dtype": "float32"}, {"name": "584", "dtype": "float32"}, {"name": "585", "dtype": "float32"}, {"name": "586", "dtype": "float32"}, {"name": "587", "dtype": "float32"}, {"name": "588", "dtype": "float32"}, {"name": "589", "dtype": "float32"}, {"name": "590", "dtype": "float32"}, {"name": "591", "dtype": "float32"}, {"name": "592", "dtype": "float32"}, {"name": "593", "dtype": "float32"}, {"name": "594", "dtype": "float32"}, {"name": "595", "dtype": "float32"}, {"name": "596", "dtype": "float32"}, {"name": "597", "dtype": "float32"}, {"name": "598", "dtype": "float32"}, {"name": "599", "dtype": "float32"}, {"name": "600", "dtype": "float32"}, {"name": "601", "dtype": "float32"}, {"name": "602", "dtype": "float32"}, {"name": "603", "dtype": "float32"}, {"name": "604", "dtype": "float32"}, {"name": "605", "dtype": "float32"}, {"name": "606", "dtype": "float32"}, {"name": "607", "dtype": "float32"}, {"name": "608", "dtype": "float32"}, {"name": "609", "dtype": "float32"}, {"name": "610", "dtype": "float32"}, {"name": "611", "dtype": "float32"}, {"name": "612", "dtype": "float32"}, {"name": "613", "dtype": "float32"}, {"name": "614", "dtype": "float32"}, {"name": "615", "dtype": "float32"}, {"name": "616", "dtype": "float32"}, {"name": "617", "dtype": "float32"}, {"name": "618", "dtype": "float32"}, {"name": "619", "dtype": "float32"}, {"name": "620", "dtype": "float32"}, {"name": "621", "dtype": "float32"}, {"name": "622", "dtype": "float32"}, {"name": "623", "dtype": "float32"}, {"name": "624", "dtype": "float32"}, {"name": "625", "dtype": "float32"}, {"name": "626", "dtype": "float32"}, {"name": "627", "dtype": "float32"}, {"name": "628", "dtype": "float32"}, {"name": "629", "dtype": "float32"}, {"name": "630", "dtype": "float32"}, {"name": "631", "dtype": "float32"}, {"name": "632", "dtype": "float32"}, {"name": "633", "dtype": "float32"}, {"name": "634", "dtype": "float32"}, {"name": "635", "dtype": "float32"}, {"name": "636", "dtype": "float32"}, {"name": "637", "dtype": "float32"}, {"name": "638", "dtype": "float32"}, {"name": "639", "dtype": "float32"}, {"name": "640", "dtype": "float32"}, {"name": "641", "dtype": "float32"}, {"name": "642", "dtype": "float32"}, {"name": "643", "dtype": "float32"}, {"name": "644", "dtype": "float32"}, {"name": "645", "dtype": "float32"}, {"name": "646", "dtype": "float32"}, {"name": "647", "dtype": "float32"}, {"name": "648", "dtype": "float32"}, {"name": "649", "dtype": "float32"}, {"name": "650", "dtype": "float32"}, {"name": "651", "dtype": "float32"}, {"name": "652", "dtype": "float32"}, {"name": "653", "dtype": "float32"}, {"name": "654", "dtype": "float32"}, {"name": "655", "dtype": "float32"}, {"name": "656", "dtype": "float32"}, {"name": "657", "dtype": "float32"}, {"name": "658", "dtype": "float32"}, {"name": "659", "dtype": "float32"}, {"name": "660", "dtype": "float32"}, {"name": "661", "dtype": "float32"}, {"name": "662", "dtype": "float32"}, {"name": "663", "dtype": "float32"}, {"name": "664", "dtype": "float32"}, {"name": "665", "dtype": "float32"}, {"name": "666", "dtype": "float32"}, {"name": "667", "dtype": "float32"}, {"name": "668", "dtype": "float32"}, {"name": "669", "dtype": "float32"}, {"name": 
"670", "dtype": "float32"}, {"name": "671", "dtype": "float32"}, {"name": "672", "dtype": "float32"}, {"name": "673", "dtype": "float32"}, {"name": "674", "dtype": "float32"}, {"name": "675", "dtype": "float32"}, {"name": "676", "dtype": "float32"}, {"name": "677", "dtype": "float32"}, {"name": "678", "dtype": "float32"}, {"name": "679", "dtype": "float32"}, {"name": "680", "dtype": "float32"}, {"name": "681", "dtype": "float32"}, {"name": "682", "dtype": "float32"}, {"name": "683", "dtype": "float32"}, {"name": "684", "dtype": "float32"}, {"name": "685", "dtype": "float32"}, {"name": "686", "dtype": "float32"}, {"name": "687", "dtype": "float32"}, {"name": "688", "dtype": "float32"}, {"name": "689", "dtype": "float32"}, {"name": "690", "dtype": "float32"}, {"name": "691", "dtype": "float32"}, {"name": "692", "dtype": "float32"}, {"name": "693", "dtype": "float32"}, {"name": "694", "dtype": "float32"}, {"name": "695", "dtype": "float32"}, {"name": "696", "dtype": "float32"}, {"name": "697", "dtype": "float32"}, {"name": "698", "dtype": "float32"}, {"name": "699", "dtype": "float32"}, {"name": "700", "dtype": "float32"}, {"name": "701", "dtype": "float32"}, {"name": "702", "dtype": "float32"}, {"name": "703", "dtype": "float32"}, {"name": "704", "dtype": "float32"}, {"name": "705", "dtype": "float32"}, {"name": "706", "dtype": "float32"}, {"name": "707", "dtype": "float32"}, {"name": "708", "dtype": "float32"}, {"name": "709", "dtype": "float32"}, {"name": "710", "dtype": "float32"}, {"name": "711", "dtype": "float32"}, {"name": "712", "dtype": "float32"}, {"name": "713", "dtype": "float32"}, {"name": "714", "dtype": "float32"}, {"name": "715", "dtype": "float32"}, {"name": "716", "dtype": "float32"}, {"name": "717", "dtype": "float32"}, {"name": "718", "dtype": "float32"}, {"name": "719", "dtype": "float32"}, {"name": "720", "dtype": "float32"}, {"name": "721", "dtype": "float32"}, {"name": "722", "dtype": "float32"}, {"name": "723", "dtype": "float32"}, {"name": "724", "dtype": "float32"}, {"name": "725", "dtype": "float32"}, {"name": "726", "dtype": "float32"}, {"name": "727", "dtype": "float32"}, {"name": "728", "dtype": "float32"}, {"name": "729", "dtype": "float32"}, {"name": "730", "dtype": "float32"}, {"name": "731", "dtype": "float32"}, {"name": "732", "dtype": "float32"}, {"name": "733", "dtype": "float32"}, {"name": "734", "dtype": "float32"}, {"name": "735", "dtype": "float32"}, {"name": "736", "dtype": "float32"}, {"name": "737", "dtype": "float32"}, {"name": "738", "dtype": "float32"}, {"name": "739", "dtype": "float32"}, {"name": "740", "dtype": "float32"}, {"name": "741", "dtype": "float32"}, {"name": "742", "dtype": "float32"}, {"name": "743", "dtype": "float32"}, {"name": "744", "dtype": "float32"}, {"name": "745", "dtype": "float32"}, {"name": "746", "dtype": "float32"}, {"name": "747", "dtype": "float32"}, {"name": "748", "dtype": "float32"}, {"name": "749", "dtype": "float32"}, {"name": "750", "dtype": "float32"}, {"name": "751", "dtype": "float32"}, {"name": "752", "dtype": "float32"}, {"name": "753", "dtype": "float32"}, {"name": "754", "dtype": "float32"}, {"name": "755", "dtype": "float32"}, {"name": "756", "dtype": "float32"}, {"name": "757", "dtype": "float32"}, {"name": "758", "dtype": "float32"}, {"name": "759", "dtype": "float32"}, {"name": "760", "dtype": "float32"}, {"name": "761", "dtype": "float32"}, {"name": "762", "dtype": "float32"}, {"name": "763", "dtype": "float32"}, {"name": "764", "dtype": "float32"}, {"name": "765", "dtype": "float32"}, {"name": 
"766", "dtype": "float32"}, {"name": "767", "dtype": "float32"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 115582709.0625, "num_examples": 37500}, {"name": "test", "num_bytes": 38527570.0, "num_examples": 12500}], "download_size": 211839200, "dataset_size": 154110279.0625}}
2023-08-23T05:24:55+00:00
[]
[]
TAGS #region-us
# Dataset Card for "BGL_GPT2_Finetuned" More Information needed
[ "# Dataset Card for \"BGL_GPT2_Finetuned\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"BGL_GPT2_Finetuned\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"BGL_GPT2_Finetuned\"\n\nMore Information needed" ]
a85fa05c46a38989969d03c3480a7cdbc7f5e442
# Dataset Card for Dataset Name ## Dataset Description - **Homepage:** - **Repository:** - **Paper:** - **Leaderboard:** - **Point of Contact:** ### Dataset Summary This dataset card aims to be a base template for new datasets. It has been generated using [this raw template](https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/templates/datasetcard_template.md?plain=1). ### Supported Tasks and Leaderboards ### Languages English ## Dataset Structure categorized ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation 08/18/2023 ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data no illegal content ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information Apache 2.0 ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
Wrathfulreap/Wrath001
[ "license:apache-2.0", "region:us" ]
2023-08-18T15:43:26+00:00
{"license": "apache-2.0"}
2023-08-18T16:10:31+00:00
[]
[]
TAGS #license-apache-2.0 #region-us
# Dataset Card for Dataset Name ## Dataset Description - Homepage: - Repository: - Paper: - Leaderboard: - Point of Contact: ### Dataset Summary This dataset card aims to be a base template for new datasets. It has been generated using this raw template. ### Supported Tasks and Leaderboards ### Languages English ## Dataset Structure categorized ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation 08/18/2023 ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data no illegal content ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information Apache 2.0 ### Contributions
[ "# Dataset Card for Dataset Name", "## Dataset Description\n\n- Homepage: \n- Repository: \n- Paper: \n- Leaderboard: \n- Point of Contact:", "### Dataset Summary\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "### Supported Tasks and leaderboards", "### Languages\n\nEnglish", "## Dataset Structure\ncategorized", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation\n08/18/2023", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerati0ons for Using the Data\nno illigal content", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information\n\nApache 2.0", "### Contributions" ]
[ "TAGS\n#license-apache-2.0 #region-us \n", "# Dataset Card for Dataset Name", "## Dataset Description\n\n- Homepage: \n- Repository: \n- Paper: \n- Leaderboard: \n- Point of Contact:", "### Dataset Summary\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "### Supported Tasks and leaderboards", "### Languages\n\nEnglish", "## Dataset Structure\ncategorized", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation\n08/18/2023", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerati0ons for Using the Data\nno illigal content", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information\n\nApache 2.0", "### Contributions" ]
[ 14, 8, 24, 32, 10, 5, 8, 6, 5, 5, 9, 7, 4, 10, 10, 5, 5, 9, 8, 14, 7, 8, 7, 5, 6, 9, 5 ]
[ "passage: TAGS\n#license-apache-2.0 #region-us \n# Dataset Card for Dataset Name## Dataset Description\n\n- Homepage: \n- Repository: \n- Paper: \n- Leaderboard: \n- Point of Contact:### Dataset Summary\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.### Supported Tasks and leaderboards### Languages\n\nEnglish## Dataset Structure\ncategorized### Data Instances### Data Fields### Data Splits## Dataset Creation\n08/18/2023### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerati0ons for Using the Data\nno illigal content### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information\n\nApache 2.0### Contributions" ]
790ef648355d64469f4dca92c45453348ad427f7
# Dataset Card for "dermatology_anomaly_detection_small" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
KhalfounMehdi/dermatology_anomaly_detection_small
[ "region:us" ]
2023-08-18T15:45:56+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 39432376.0, "num_examples": 498}], "download_size": 39436795, "dataset_size": 39432376.0}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}, {"config_name": "KhalfounMehdi--dermatology_anomaly_detection_small", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2023-08-18T15:51:49+00:00
[]
[]
TAGS #region-us
# Dataset Card for "dermatology_anomaly_detection_small" More Information needed
[ "# Dataset Card for \"dermatology_anomaly_detection_small\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"dermatology_anomaly_detection_small\"\n\nMore Information needed" ]
[ 6, 23 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"dermatology_anomaly_detection_small\"\n\nMore Information needed" ]
2f02cc332b111c9aeb9bc737a2492be554036fa0
# Dataset Card for "BGL_GPTNEO_Finetuned" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
EgilKarlsen/BGL_GPTNEO_Finetuned
[ "region:us" ]
2023-08-18T15:53:06+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "0", "dtype": "float32"}, {"name": "1", "dtype": "float32"}, {"name": "2", "dtype": "float32"}, {"name": "3", "dtype": "float32"}, {"name": "4", "dtype": "float32"}, {"name": "5", "dtype": "float32"}, {"name": "6", "dtype": "float32"}, {"name": "7", "dtype": "float32"}, {"name": "8", "dtype": "float32"}, {"name": "9", "dtype": "float32"}, {"name": "10", "dtype": "float32"}, {"name": "11", "dtype": "float32"}, {"name": "12", "dtype": "float32"}, {"name": "13", "dtype": "float32"}, {"name": "14", "dtype": "float32"}, {"name": "15", "dtype": "float32"}, {"name": "16", "dtype": "float32"}, {"name": "17", "dtype": "float32"}, {"name": "18", "dtype": "float32"}, {"name": "19", "dtype": "float32"}, {"name": "20", "dtype": "float32"}, {"name": "21", "dtype": "float32"}, {"name": "22", "dtype": "float32"}, {"name": "23", "dtype": "float32"}, {"name": "24", "dtype": "float32"}, {"name": "25", "dtype": "float32"}, {"name": "26", "dtype": "float32"}, {"name": "27", "dtype": "float32"}, {"name": "28", "dtype": "float32"}, {"name": "29", "dtype": "float32"}, {"name": "30", "dtype": "float32"}, {"name": "31", "dtype": "float32"}, {"name": "32", "dtype": "float32"}, {"name": "33", "dtype": "float32"}, {"name": "34", "dtype": "float32"}, {"name": "35", "dtype": "float32"}, {"name": "36", "dtype": "float32"}, {"name": "37", "dtype": "float32"}, {"name": "38", "dtype": "float32"}, {"name": "39", "dtype": "float32"}, {"name": "40", "dtype": "float32"}, {"name": "41", "dtype": "float32"}, {"name": "42", "dtype": "float32"}, {"name": "43", "dtype": "float32"}, {"name": "44", "dtype": "float32"}, {"name": "45", "dtype": "float32"}, {"name": "46", "dtype": "float32"}, {"name": "47", "dtype": "float32"}, {"name": "48", "dtype": "float32"}, {"name": "49", "dtype": "float32"}, {"name": "50", "dtype": "float32"}, {"name": "51", "dtype": "float32"}, {"name": "52", "dtype": "float32"}, {"name": "53", "dtype": "float32"}, {"name": "54", "dtype": "float32"}, {"name": "55", "dtype": "float32"}, {"name": "56", "dtype": "float32"}, {"name": "57", "dtype": "float32"}, {"name": "58", "dtype": "float32"}, {"name": "59", "dtype": "float32"}, {"name": "60", "dtype": "float32"}, {"name": "61", "dtype": "float32"}, {"name": "62", "dtype": "float32"}, {"name": "63", "dtype": "float32"}, {"name": "64", "dtype": "float32"}, {"name": "65", "dtype": "float32"}, {"name": "66", "dtype": "float32"}, {"name": "67", "dtype": "float32"}, {"name": "68", "dtype": "float32"}, {"name": "69", "dtype": "float32"}, {"name": "70", "dtype": "float32"}, {"name": "71", "dtype": "float32"}, {"name": "72", "dtype": "float32"}, {"name": "73", "dtype": "float32"}, {"name": "74", "dtype": "float32"}, {"name": "75", "dtype": "float32"}, {"name": "76", "dtype": "float32"}, {"name": "77", "dtype": "float32"}, {"name": "78", "dtype": "float32"}, {"name": "79", "dtype": "float32"}, {"name": "80", "dtype": "float32"}, {"name": "81", "dtype": "float32"}, {"name": "82", "dtype": "float32"}, {"name": "83", "dtype": "float32"}, {"name": "84", "dtype": "float32"}, {"name": "85", "dtype": "float32"}, {"name": "86", "dtype": "float32"}, {"name": "87", "dtype": "float32"}, {"name": "88", "dtype": "float32"}, {"name": "89", "dtype": "float32"}, {"name": "90", "dtype": "float32"}, {"name": "91", "dtype": "float32"}, {"name": "92", "dtype": "float32"}, {"name": "93", "dtype": "float32"}, 
{"name": "94", "dtype": "float32"}, {"name": "95", "dtype": "float32"}, {"name": "96", "dtype": "float32"}, {"name": "97", "dtype": "float32"}, {"name": "98", "dtype": "float32"}, {"name": "99", "dtype": "float32"}, {"name": "100", "dtype": "float32"}, {"name": "101", "dtype": "float32"}, {"name": "102", "dtype": "float32"}, {"name": "103", "dtype": "float32"}, {"name": "104", "dtype": "float32"}, {"name": "105", "dtype": "float32"}, {"name": "106", "dtype": "float32"}, {"name": "107", "dtype": "float32"}, {"name": "108", "dtype": "float32"}, {"name": "109", "dtype": "float32"}, {"name": "110", "dtype": "float32"}, {"name": "111", "dtype": "float32"}, {"name": "112", "dtype": "float32"}, {"name": "113", "dtype": "float32"}, {"name": "114", "dtype": "float32"}, {"name": "115", "dtype": "float32"}, {"name": "116", "dtype": "float32"}, {"name": "117", "dtype": "float32"}, {"name": "118", "dtype": "float32"}, {"name": "119", "dtype": "float32"}, {"name": "120", "dtype": "float32"}, {"name": "121", "dtype": "float32"}, {"name": "122", "dtype": "float32"}, {"name": "123", "dtype": "float32"}, {"name": "124", "dtype": "float32"}, {"name": "125", "dtype": "float32"}, {"name": "126", "dtype": "float32"}, {"name": "127", "dtype": "float32"}, {"name": "128", "dtype": "float32"}, {"name": "129", "dtype": "float32"}, {"name": "130", "dtype": "float32"}, {"name": "131", "dtype": "float32"}, {"name": "132", "dtype": "float32"}, {"name": "133", "dtype": "float32"}, {"name": "134", "dtype": "float32"}, {"name": "135", "dtype": "float32"}, {"name": "136", "dtype": "float32"}, {"name": "137", "dtype": "float32"}, {"name": "138", "dtype": "float32"}, {"name": "139", "dtype": "float32"}, {"name": "140", "dtype": "float32"}, {"name": "141", "dtype": "float32"}, {"name": "142", "dtype": "float32"}, {"name": "143", "dtype": "float32"}, {"name": "144", "dtype": "float32"}, {"name": "145", "dtype": "float32"}, {"name": "146", "dtype": "float32"}, {"name": "147", "dtype": "float32"}, {"name": "148", "dtype": "float32"}, {"name": "149", "dtype": "float32"}, {"name": "150", "dtype": "float32"}, {"name": "151", "dtype": "float32"}, {"name": "152", "dtype": "float32"}, {"name": "153", "dtype": "float32"}, {"name": "154", "dtype": "float32"}, {"name": "155", "dtype": "float32"}, {"name": "156", "dtype": "float32"}, {"name": "157", "dtype": "float32"}, {"name": "158", "dtype": "float32"}, {"name": "159", "dtype": "float32"}, {"name": "160", "dtype": "float32"}, {"name": "161", "dtype": "float32"}, {"name": "162", "dtype": "float32"}, {"name": "163", "dtype": "float32"}, {"name": "164", "dtype": "float32"}, {"name": "165", "dtype": "float32"}, {"name": "166", "dtype": "float32"}, {"name": "167", "dtype": "float32"}, {"name": "168", "dtype": "float32"}, {"name": "169", "dtype": "float32"}, {"name": "170", "dtype": "float32"}, {"name": "171", "dtype": "float32"}, {"name": "172", "dtype": "float32"}, {"name": "173", "dtype": "float32"}, {"name": "174", "dtype": "float32"}, {"name": "175", "dtype": "float32"}, {"name": "176", "dtype": "float32"}, {"name": "177", "dtype": "float32"}, {"name": "178", "dtype": "float32"}, {"name": "179", "dtype": "float32"}, {"name": "180", "dtype": "float32"}, {"name": "181", "dtype": "float32"}, {"name": "182", "dtype": "float32"}, {"name": "183", "dtype": "float32"}, {"name": "184", "dtype": "float32"}, {"name": "185", "dtype": "float32"}, {"name": "186", "dtype": "float32"}, {"name": "187", "dtype": "float32"}, {"name": "188", "dtype": "float32"}, {"name": "189", "dtype": "float32"}, {"name": 
"190", "dtype": "float32"}, {"name": "191", "dtype": "float32"}, {"name": "192", "dtype": "float32"}, {"name": "193", "dtype": "float32"}, {"name": "194", "dtype": "float32"}, {"name": "195", "dtype": "float32"}, {"name": "196", "dtype": "float32"}, {"name": "197", "dtype": "float32"}, {"name": "198", "dtype": "float32"}, {"name": "199", "dtype": "float32"}, {"name": "200", "dtype": "float32"}, {"name": "201", "dtype": "float32"}, {"name": "202", "dtype": "float32"}, {"name": "203", "dtype": "float32"}, {"name": "204", "dtype": "float32"}, {"name": "205", "dtype": "float32"}, {"name": "206", "dtype": "float32"}, {"name": "207", "dtype": "float32"}, {"name": "208", "dtype": "float32"}, {"name": "209", "dtype": "float32"}, {"name": "210", "dtype": "float32"}, {"name": "211", "dtype": "float32"}, {"name": "212", "dtype": "float32"}, {"name": "213", "dtype": "float32"}, {"name": "214", "dtype": "float32"}, {"name": "215", "dtype": "float32"}, {"name": "216", "dtype": "float32"}, {"name": "217", "dtype": "float32"}, {"name": "218", "dtype": "float32"}, {"name": "219", "dtype": "float32"}, {"name": "220", "dtype": "float32"}, {"name": "221", "dtype": "float32"}, {"name": "222", "dtype": "float32"}, {"name": "223", "dtype": "float32"}, {"name": "224", "dtype": "float32"}, {"name": "225", "dtype": "float32"}, {"name": "226", "dtype": "float32"}, {"name": "227", "dtype": "float32"}, {"name": "228", "dtype": "float32"}, {"name": "229", "dtype": "float32"}, {"name": "230", "dtype": "float32"}, {"name": "231", "dtype": "float32"}, {"name": "232", "dtype": "float32"}, {"name": "233", "dtype": "float32"}, {"name": "234", "dtype": "float32"}, {"name": "235", "dtype": "float32"}, {"name": "236", "dtype": "float32"}, {"name": "237", "dtype": "float32"}, {"name": "238", "dtype": "float32"}, {"name": "239", "dtype": "float32"}, {"name": "240", "dtype": "float32"}, {"name": "241", "dtype": "float32"}, {"name": "242", "dtype": "float32"}, {"name": "243", "dtype": "float32"}, {"name": "244", "dtype": "float32"}, {"name": "245", "dtype": "float32"}, {"name": "246", "dtype": "float32"}, {"name": "247", "dtype": "float32"}, {"name": "248", "dtype": "float32"}, {"name": "249", "dtype": "float32"}, {"name": "250", "dtype": "float32"}, {"name": "251", "dtype": "float32"}, {"name": "252", "dtype": "float32"}, {"name": "253", "dtype": "float32"}, {"name": "254", "dtype": "float32"}, {"name": "255", "dtype": "float32"}, {"name": "256", "dtype": "float32"}, {"name": "257", "dtype": "float32"}, {"name": "258", "dtype": "float32"}, {"name": "259", "dtype": "float32"}, {"name": "260", "dtype": "float32"}, {"name": "261", "dtype": "float32"}, {"name": "262", "dtype": "float32"}, {"name": "263", "dtype": "float32"}, {"name": "264", "dtype": "float32"}, {"name": "265", "dtype": "float32"}, {"name": "266", "dtype": "float32"}, {"name": "267", "dtype": "float32"}, {"name": "268", "dtype": "float32"}, {"name": "269", "dtype": "float32"}, {"name": "270", "dtype": "float32"}, {"name": "271", "dtype": "float32"}, {"name": "272", "dtype": "float32"}, {"name": "273", "dtype": "float32"}, {"name": "274", "dtype": "float32"}, {"name": "275", "dtype": "float32"}, {"name": "276", "dtype": "float32"}, {"name": "277", "dtype": "float32"}, {"name": "278", "dtype": "float32"}, {"name": "279", "dtype": "float32"}, {"name": "280", "dtype": "float32"}, {"name": "281", "dtype": "float32"}, {"name": "282", "dtype": "float32"}, {"name": "283", "dtype": "float32"}, {"name": "284", "dtype": "float32"}, {"name": "285", "dtype": "float32"}, {"name": 
"286", "dtype": "float32"}, {"name": "287", "dtype": "float32"}, {"name": "288", "dtype": "float32"}, {"name": "289", "dtype": "float32"}, {"name": "290", "dtype": "float32"}, {"name": "291", "dtype": "float32"}, {"name": "292", "dtype": "float32"}, {"name": "293", "dtype": "float32"}, {"name": "294", "dtype": "float32"}, {"name": "295", "dtype": "float32"}, {"name": "296", "dtype": "float32"}, {"name": "297", "dtype": "float32"}, {"name": "298", "dtype": "float32"}, {"name": "299", "dtype": "float32"}, {"name": "300", "dtype": "float32"}, {"name": "301", "dtype": "float32"}, {"name": "302", "dtype": "float32"}, {"name": "303", "dtype": "float32"}, {"name": "304", "dtype": "float32"}, {"name": "305", "dtype": "float32"}, {"name": "306", "dtype": "float32"}, {"name": "307", "dtype": "float32"}, {"name": "308", "dtype": "float32"}, {"name": "309", "dtype": "float32"}, {"name": "310", "dtype": "float32"}, {"name": "311", "dtype": "float32"}, {"name": "312", "dtype": "float32"}, {"name": "313", "dtype": "float32"}, {"name": "314", "dtype": "float32"}, {"name": "315", "dtype": "float32"}, {"name": "316", "dtype": "float32"}, {"name": "317", "dtype": "float32"}, {"name": "318", "dtype": "float32"}, {"name": "319", "dtype": "float32"}, {"name": "320", "dtype": "float32"}, {"name": "321", "dtype": "float32"}, {"name": "322", "dtype": "float32"}, {"name": "323", "dtype": "float32"}, {"name": "324", "dtype": "float32"}, {"name": "325", "dtype": "float32"}, {"name": "326", "dtype": "float32"}, {"name": "327", "dtype": "float32"}, {"name": "328", "dtype": "float32"}, {"name": "329", "dtype": "float32"}, {"name": "330", "dtype": "float32"}, {"name": "331", "dtype": "float32"}, {"name": "332", "dtype": "float32"}, {"name": "333", "dtype": "float32"}, {"name": "334", "dtype": "float32"}, {"name": "335", "dtype": "float32"}, {"name": "336", "dtype": "float32"}, {"name": "337", "dtype": "float32"}, {"name": "338", "dtype": "float32"}, {"name": "339", "dtype": "float32"}, {"name": "340", "dtype": "float32"}, {"name": "341", "dtype": "float32"}, {"name": "342", "dtype": "float32"}, {"name": "343", "dtype": "float32"}, {"name": "344", "dtype": "float32"}, {"name": "345", "dtype": "float32"}, {"name": "346", "dtype": "float32"}, {"name": "347", "dtype": "float32"}, {"name": "348", "dtype": "float32"}, {"name": "349", "dtype": "float32"}, {"name": "350", "dtype": "float32"}, {"name": "351", "dtype": "float32"}, {"name": "352", "dtype": "float32"}, {"name": "353", "dtype": "float32"}, {"name": "354", "dtype": "float32"}, {"name": "355", "dtype": "float32"}, {"name": "356", "dtype": "float32"}, {"name": "357", "dtype": "float32"}, {"name": "358", "dtype": "float32"}, {"name": "359", "dtype": "float32"}, {"name": "360", "dtype": "float32"}, {"name": "361", "dtype": "float32"}, {"name": "362", "dtype": "float32"}, {"name": "363", "dtype": "float32"}, {"name": "364", "dtype": "float32"}, {"name": "365", "dtype": "float32"}, {"name": "366", "dtype": "float32"}, {"name": "367", "dtype": "float32"}, {"name": "368", "dtype": "float32"}, {"name": "369", "dtype": "float32"}, {"name": "370", "dtype": "float32"}, {"name": "371", "dtype": "float32"}, {"name": "372", "dtype": "float32"}, {"name": "373", "dtype": "float32"}, {"name": "374", "dtype": "float32"}, {"name": "375", "dtype": "float32"}, {"name": "376", "dtype": "float32"}, {"name": "377", "dtype": "float32"}, {"name": "378", "dtype": "float32"}, {"name": "379", "dtype": "float32"}, {"name": "380", "dtype": "float32"}, {"name": "381", "dtype": "float32"}, {"name": 
"382", "dtype": "float32"}, {"name": "383", "dtype": "float32"}, {"name": "384", "dtype": "float32"}, {"name": "385", "dtype": "float32"}, {"name": "386", "dtype": "float32"}, {"name": "387", "dtype": "float32"}, {"name": "388", "dtype": "float32"}, {"name": "389", "dtype": "float32"}, {"name": "390", "dtype": "float32"}, {"name": "391", "dtype": "float32"}, {"name": "392", "dtype": "float32"}, {"name": "393", "dtype": "float32"}, {"name": "394", "dtype": "float32"}, {"name": "395", "dtype": "float32"}, {"name": "396", "dtype": "float32"}, {"name": "397", "dtype": "float32"}, {"name": "398", "dtype": "float32"}, {"name": "399", "dtype": "float32"}, {"name": "400", "dtype": "float32"}, {"name": "401", "dtype": "float32"}, {"name": "402", "dtype": "float32"}, {"name": "403", "dtype": "float32"}, {"name": "404", "dtype": "float32"}, {"name": "405", "dtype": "float32"}, {"name": "406", "dtype": "float32"}, {"name": "407", "dtype": "float32"}, {"name": "408", "dtype": "float32"}, {"name": "409", "dtype": "float32"}, {"name": "410", "dtype": "float32"}, {"name": "411", "dtype": "float32"}, {"name": "412", "dtype": "float32"}, {"name": "413", "dtype": "float32"}, {"name": "414", "dtype": "float32"}, {"name": "415", "dtype": "float32"}, {"name": "416", "dtype": "float32"}, {"name": "417", "dtype": "float32"}, {"name": "418", "dtype": "float32"}, {"name": "419", "dtype": "float32"}, {"name": "420", "dtype": "float32"}, {"name": "421", "dtype": "float32"}, {"name": "422", "dtype": "float32"}, {"name": "423", "dtype": "float32"}, {"name": "424", "dtype": "float32"}, {"name": "425", "dtype": "float32"}, {"name": "426", "dtype": "float32"}, {"name": "427", "dtype": "float32"}, {"name": "428", "dtype": "float32"}, {"name": "429", "dtype": "float32"}, {"name": "430", "dtype": "float32"}, {"name": "431", "dtype": "float32"}, {"name": "432", "dtype": "float32"}, {"name": "433", "dtype": "float32"}, {"name": "434", "dtype": "float32"}, {"name": "435", "dtype": "float32"}, {"name": "436", "dtype": "float32"}, {"name": "437", "dtype": "float32"}, {"name": "438", "dtype": "float32"}, {"name": "439", "dtype": "float32"}, {"name": "440", "dtype": "float32"}, {"name": "441", "dtype": "float32"}, {"name": "442", "dtype": "float32"}, {"name": "443", "dtype": "float32"}, {"name": "444", "dtype": "float32"}, {"name": "445", "dtype": "float32"}, {"name": "446", "dtype": "float32"}, {"name": "447", "dtype": "float32"}, {"name": "448", "dtype": "float32"}, {"name": "449", "dtype": "float32"}, {"name": "450", "dtype": "float32"}, {"name": "451", "dtype": "float32"}, {"name": "452", "dtype": "float32"}, {"name": "453", "dtype": "float32"}, {"name": "454", "dtype": "float32"}, {"name": "455", "dtype": "float32"}, {"name": "456", "dtype": "float32"}, {"name": "457", "dtype": "float32"}, {"name": "458", "dtype": "float32"}, {"name": "459", "dtype": "float32"}, {"name": "460", "dtype": "float32"}, {"name": "461", "dtype": "float32"}, {"name": "462", "dtype": "float32"}, {"name": "463", "dtype": "float32"}, {"name": "464", "dtype": "float32"}, {"name": "465", "dtype": "float32"}, {"name": "466", "dtype": "float32"}, {"name": "467", "dtype": "float32"}, {"name": "468", "dtype": "float32"}, {"name": "469", "dtype": "float32"}, {"name": "470", "dtype": "float32"}, {"name": "471", "dtype": "float32"}, {"name": "472", "dtype": "float32"}, {"name": "473", "dtype": "float32"}, {"name": "474", "dtype": "float32"}, {"name": "475", "dtype": "float32"}, {"name": "476", "dtype": "float32"}, {"name": "477", "dtype": "float32"}, {"name": 
"478", "dtype": "float32"}, {"name": "479", "dtype": "float32"}, {"name": "480", "dtype": "float32"}, {"name": "481", "dtype": "float32"}, {"name": "482", "dtype": "float32"}, {"name": "483", "dtype": "float32"}, {"name": "484", "dtype": "float32"}, {"name": "485", "dtype": "float32"}, {"name": "486", "dtype": "float32"}, {"name": "487", "dtype": "float32"}, {"name": "488", "dtype": "float32"}, {"name": "489", "dtype": "float32"}, {"name": "490", "dtype": "float32"}, {"name": "491", "dtype": "float32"}, {"name": "492", "dtype": "float32"}, {"name": "493", "dtype": "float32"}, {"name": "494", "dtype": "float32"}, {"name": "495", "dtype": "float32"}, {"name": "496", "dtype": "float32"}, {"name": "497", "dtype": "float32"}, {"name": "498", "dtype": "float32"}, {"name": "499", "dtype": "float32"}, {"name": "500", "dtype": "float32"}, {"name": "501", "dtype": "float32"}, {"name": "502", "dtype": "float32"}, {"name": "503", "dtype": "float32"}, {"name": "504", "dtype": "float32"}, {"name": "505", "dtype": "float32"}, {"name": "506", "dtype": "float32"}, {"name": "507", "dtype": "float32"}, {"name": "508", "dtype": "float32"}, {"name": "509", "dtype": "float32"}, {"name": "510", "dtype": "float32"}, {"name": "511", "dtype": "float32"}, {"name": "512", "dtype": "float32"}, {"name": "513", "dtype": "float32"}, {"name": "514", "dtype": "float32"}, {"name": "515", "dtype": "float32"}, {"name": "516", "dtype": "float32"}, {"name": "517", "dtype": "float32"}, {"name": "518", "dtype": "float32"}, {"name": "519", "dtype": "float32"}, {"name": "520", "dtype": "float32"}, {"name": "521", "dtype": "float32"}, {"name": "522", "dtype": "float32"}, {"name": "523", "dtype": "float32"}, {"name": "524", "dtype": "float32"}, {"name": "525", "dtype": "float32"}, {"name": "526", "dtype": "float32"}, {"name": "527", "dtype": "float32"}, {"name": "528", "dtype": "float32"}, {"name": "529", "dtype": "float32"}, {"name": "530", "dtype": "float32"}, {"name": "531", "dtype": "float32"}, {"name": "532", "dtype": "float32"}, {"name": "533", "dtype": "float32"}, {"name": "534", "dtype": "float32"}, {"name": "535", "dtype": "float32"}, {"name": "536", "dtype": "float32"}, {"name": "537", "dtype": "float32"}, {"name": "538", "dtype": "float32"}, {"name": "539", "dtype": "float32"}, {"name": "540", "dtype": "float32"}, {"name": "541", "dtype": "float32"}, {"name": "542", "dtype": "float32"}, {"name": "543", "dtype": "float32"}, {"name": "544", "dtype": "float32"}, {"name": "545", "dtype": "float32"}, {"name": "546", "dtype": "float32"}, {"name": "547", "dtype": "float32"}, {"name": "548", "dtype": "float32"}, {"name": "549", "dtype": "float32"}, {"name": "550", "dtype": "float32"}, {"name": "551", "dtype": "float32"}, {"name": "552", "dtype": "float32"}, {"name": "553", "dtype": "float32"}, {"name": "554", "dtype": "float32"}, {"name": "555", "dtype": "float32"}, {"name": "556", "dtype": "float32"}, {"name": "557", "dtype": "float32"}, {"name": "558", "dtype": "float32"}, {"name": "559", "dtype": "float32"}, {"name": "560", "dtype": "float32"}, {"name": "561", "dtype": "float32"}, {"name": "562", "dtype": "float32"}, {"name": "563", "dtype": "float32"}, {"name": "564", "dtype": "float32"}, {"name": "565", "dtype": "float32"}, {"name": "566", "dtype": "float32"}, {"name": "567", "dtype": "float32"}, {"name": "568", "dtype": "float32"}, {"name": "569", "dtype": "float32"}, {"name": "570", "dtype": "float32"}, {"name": "571", "dtype": "float32"}, {"name": "572", "dtype": "float32"}, {"name": "573", "dtype": "float32"}, {"name": 
"574", "dtype": "float32"}, {"name": "575", "dtype": "float32"}, {"name": "576", "dtype": "float32"}, {"name": "577", "dtype": "float32"}, {"name": "578", "dtype": "float32"}, {"name": "579", "dtype": "float32"}, {"name": "580", "dtype": "float32"}, {"name": "581", "dtype": "float32"}, {"name": "582", "dtype": "float32"}, {"name": "583", "dtype": "float32"}, {"name": "584", "dtype": "float32"}, {"name": "585", "dtype": "float32"}, {"name": "586", "dtype": "float32"}, {"name": "587", "dtype": "float32"}, {"name": "588", "dtype": "float32"}, {"name": "589", "dtype": "float32"}, {"name": "590", "dtype": "float32"}, {"name": "591", "dtype": "float32"}, {"name": "592", "dtype": "float32"}, {"name": "593", "dtype": "float32"}, {"name": "594", "dtype": "float32"}, {"name": "595", "dtype": "float32"}, {"name": "596", "dtype": "float32"}, {"name": "597", "dtype": "float32"}, {"name": "598", "dtype": "float32"}, {"name": "599", "dtype": "float32"}, {"name": "600", "dtype": "float32"}, {"name": "601", "dtype": "float32"}, {"name": "602", "dtype": "float32"}, {"name": "603", "dtype": "float32"}, {"name": "604", "dtype": "float32"}, {"name": "605", "dtype": "float32"}, {"name": "606", "dtype": "float32"}, {"name": "607", "dtype": "float32"}, {"name": "608", "dtype": "float32"}, {"name": "609", "dtype": "float32"}, {"name": "610", "dtype": "float32"}, {"name": "611", "dtype": "float32"}, {"name": "612", "dtype": "float32"}, {"name": "613", "dtype": "float32"}, {"name": "614", "dtype": "float32"}, {"name": "615", "dtype": "float32"}, {"name": "616", "dtype": "float32"}, {"name": "617", "dtype": "float32"}, {"name": "618", "dtype": "float32"}, {"name": "619", "dtype": "float32"}, {"name": "620", "dtype": "float32"}, {"name": "621", "dtype": "float32"}, {"name": "622", "dtype": "float32"}, {"name": "623", "dtype": "float32"}, {"name": "624", "dtype": "float32"}, {"name": "625", "dtype": "float32"}, {"name": "626", "dtype": "float32"}, {"name": "627", "dtype": "float32"}, {"name": "628", "dtype": "float32"}, {"name": "629", "dtype": "float32"}, {"name": "630", "dtype": "float32"}, {"name": "631", "dtype": "float32"}, {"name": "632", "dtype": "float32"}, {"name": "633", "dtype": "float32"}, {"name": "634", "dtype": "float32"}, {"name": "635", "dtype": "float32"}, {"name": "636", "dtype": "float32"}, {"name": "637", "dtype": "float32"}, {"name": "638", "dtype": "float32"}, {"name": "639", "dtype": "float32"}, {"name": "640", "dtype": "float32"}, {"name": "641", "dtype": "float32"}, {"name": "642", "dtype": "float32"}, {"name": "643", "dtype": "float32"}, {"name": "644", "dtype": "float32"}, {"name": "645", "dtype": "float32"}, {"name": "646", "dtype": "float32"}, {"name": "647", "dtype": "float32"}, {"name": "648", "dtype": "float32"}, {"name": "649", "dtype": "float32"}, {"name": "650", "dtype": "float32"}, {"name": "651", "dtype": "float32"}, {"name": "652", "dtype": "float32"}, {"name": "653", "dtype": "float32"}, {"name": "654", "dtype": "float32"}, {"name": "655", "dtype": "float32"}, {"name": "656", "dtype": "float32"}, {"name": "657", "dtype": "float32"}, {"name": "658", "dtype": "float32"}, {"name": "659", "dtype": "float32"}, {"name": "660", "dtype": "float32"}, {"name": "661", "dtype": "float32"}, {"name": "662", "dtype": "float32"}, {"name": "663", "dtype": "float32"}, {"name": "664", "dtype": "float32"}, {"name": "665", "dtype": "float32"}, {"name": "666", "dtype": "float32"}, {"name": "667", "dtype": "float32"}, {"name": "668", "dtype": "float32"}, {"name": "669", "dtype": "float32"}, {"name": 
"670", "dtype": "float32"}, {"name": "671", "dtype": "float32"}, {"name": "672", "dtype": "float32"}, {"name": "673", "dtype": "float32"}, {"name": "674", "dtype": "float32"}, {"name": "675", "dtype": "float32"}, {"name": "676", "dtype": "float32"}, {"name": "677", "dtype": "float32"}, {"name": "678", "dtype": "float32"}, {"name": "679", "dtype": "float32"}, {"name": "680", "dtype": "float32"}, {"name": "681", "dtype": "float32"}, {"name": "682", "dtype": "float32"}, {"name": "683", "dtype": "float32"}, {"name": "684", "dtype": "float32"}, {"name": "685", "dtype": "float32"}, {"name": "686", "dtype": "float32"}, {"name": "687", "dtype": "float32"}, {"name": "688", "dtype": "float32"}, {"name": "689", "dtype": "float32"}, {"name": "690", "dtype": "float32"}, {"name": "691", "dtype": "float32"}, {"name": "692", "dtype": "float32"}, {"name": "693", "dtype": "float32"}, {"name": "694", "dtype": "float32"}, {"name": "695", "dtype": "float32"}, {"name": "696", "dtype": "float32"}, {"name": "697", "dtype": "float32"}, {"name": "698", "dtype": "float32"}, {"name": "699", "dtype": "float32"}, {"name": "700", "dtype": "float32"}, {"name": "701", "dtype": "float32"}, {"name": "702", "dtype": "float32"}, {"name": "703", "dtype": "float32"}, {"name": "704", "dtype": "float32"}, {"name": "705", "dtype": "float32"}, {"name": "706", "dtype": "float32"}, {"name": "707", "dtype": "float32"}, {"name": "708", "dtype": "float32"}, {"name": "709", "dtype": "float32"}, {"name": "710", "dtype": "float32"}, {"name": "711", "dtype": "float32"}, {"name": "712", "dtype": "float32"}, {"name": "713", "dtype": "float32"}, {"name": "714", "dtype": "float32"}, {"name": "715", "dtype": "float32"}, {"name": "716", "dtype": "float32"}, {"name": "717", "dtype": "float32"}, {"name": "718", "dtype": "float32"}, {"name": "719", "dtype": "float32"}, {"name": "720", "dtype": "float32"}, {"name": "721", "dtype": "float32"}, {"name": "722", "dtype": "float32"}, {"name": "723", "dtype": "float32"}, {"name": "724", "dtype": "float32"}, {"name": "725", "dtype": "float32"}, {"name": "726", "dtype": "float32"}, {"name": "727", "dtype": "float32"}, {"name": "728", "dtype": "float32"}, {"name": "729", "dtype": "float32"}, {"name": "730", "dtype": "float32"}, {"name": "731", "dtype": "float32"}, {"name": "732", "dtype": "float32"}, {"name": "733", "dtype": "float32"}, {"name": "734", "dtype": "float32"}, {"name": "735", "dtype": "float32"}, {"name": "736", "dtype": "float32"}, {"name": "737", "dtype": "float32"}, {"name": "738", "dtype": "float32"}, {"name": "739", "dtype": "float32"}, {"name": "740", "dtype": "float32"}, {"name": "741", "dtype": "float32"}, {"name": "742", "dtype": "float32"}, {"name": "743", "dtype": "float32"}, {"name": "744", "dtype": "float32"}, {"name": "745", "dtype": "float32"}, {"name": "746", "dtype": "float32"}, {"name": "747", "dtype": "float32"}, {"name": "748", "dtype": "float32"}, {"name": "749", "dtype": "float32"}, {"name": "750", "dtype": "float32"}, {"name": "751", "dtype": "float32"}, {"name": "752", "dtype": "float32"}, {"name": "753", "dtype": "float32"}, {"name": "754", "dtype": "float32"}, {"name": "755", "dtype": "float32"}, {"name": "756", "dtype": "float32"}, {"name": "757", "dtype": "float32"}, {"name": "758", "dtype": "float32"}, {"name": "759", "dtype": "float32"}, {"name": "760", "dtype": "float32"}, {"name": "761", "dtype": "float32"}, {"name": "762", "dtype": "float32"}, {"name": "763", "dtype": "float32"}, {"name": "764", "dtype": "float32"}, {"name": "765", "dtype": "float32"}, {"name": 
"766", "dtype": "float32"}, {"name": "767", "dtype": "float32"}, {"name": "768", "dtype": "float32"}, {"name": "769", "dtype": "float32"}, {"name": "770", "dtype": "float32"}, {"name": "771", "dtype": "float32"}, {"name": "772", "dtype": "float32"}, {"name": "773", "dtype": "float32"}, {"name": "774", "dtype": "float32"}, {"name": "775", "dtype": "float32"}, {"name": "776", "dtype": "float32"}, {"name": "777", "dtype": "float32"}, {"name": "778", "dtype": "float32"}, {"name": "779", "dtype": "float32"}, {"name": "780", "dtype": "float32"}, {"name": "781", "dtype": "float32"}, {"name": "782", "dtype": "float32"}, {"name": "783", "dtype": "float32"}, {"name": "784", "dtype": "float32"}, {"name": "785", "dtype": "float32"}, {"name": "786", "dtype": "float32"}, {"name": "787", "dtype": "float32"}, {"name": "788", "dtype": "float32"}, {"name": "789", "dtype": "float32"}, {"name": "790", "dtype": "float32"}, {"name": "791", "dtype": "float32"}, {"name": "792", "dtype": "float32"}, {"name": "793", "dtype": "float32"}, {"name": "794", "dtype": "float32"}, {"name": "795", "dtype": "float32"}, {"name": "796", "dtype": "float32"}, {"name": "797", "dtype": "float32"}, {"name": "798", "dtype": "float32"}, {"name": "799", "dtype": "float32"}, {"name": "800", "dtype": "float32"}, {"name": "801", "dtype": "float32"}, {"name": "802", "dtype": "float32"}, {"name": "803", "dtype": "float32"}, {"name": "804", "dtype": "float32"}, {"name": "805", "dtype": "float32"}, {"name": "806", "dtype": "float32"}, {"name": "807", "dtype": "float32"}, {"name": "808", "dtype": "float32"}, {"name": "809", "dtype": "float32"}, {"name": "810", "dtype": "float32"}, {"name": "811", "dtype": "float32"}, {"name": "812", "dtype": "float32"}, {"name": "813", "dtype": "float32"}, {"name": "814", "dtype": "float32"}, {"name": "815", "dtype": "float32"}, {"name": "816", "dtype": "float32"}, {"name": "817", "dtype": "float32"}, {"name": "818", "dtype": "float32"}, {"name": "819", "dtype": "float32"}, {"name": "820", "dtype": "float32"}, {"name": "821", "dtype": "float32"}, {"name": "822", "dtype": "float32"}, {"name": "823", "dtype": "float32"}, {"name": "824", "dtype": "float32"}, {"name": "825", "dtype": "float32"}, {"name": "826", "dtype": "float32"}, {"name": "827", "dtype": "float32"}, {"name": "828", "dtype": "float32"}, {"name": "829", "dtype": "float32"}, {"name": "830", "dtype": "float32"}, {"name": "831", "dtype": "float32"}, {"name": "832", "dtype": "float32"}, {"name": "833", "dtype": "float32"}, {"name": "834", "dtype": "float32"}, {"name": "835", "dtype": "float32"}, {"name": "836", "dtype": "float32"}, {"name": "837", "dtype": "float32"}, {"name": "838", "dtype": "float32"}, {"name": "839", "dtype": "float32"}, {"name": "840", "dtype": "float32"}, {"name": "841", "dtype": "float32"}, {"name": "842", "dtype": "float32"}, {"name": "843", "dtype": "float32"}, {"name": "844", "dtype": "float32"}, {"name": "845", "dtype": "float32"}, {"name": "846", "dtype": "float32"}, {"name": "847", "dtype": "float32"}, {"name": "848", "dtype": "float32"}, {"name": "849", "dtype": "float32"}, {"name": "850", "dtype": "float32"}, {"name": "851", "dtype": "float32"}, {"name": "852", "dtype": "float32"}, {"name": "853", "dtype": "float32"}, {"name": "854", "dtype": "float32"}, {"name": "855", "dtype": "float32"}, {"name": "856", "dtype": "float32"}, {"name": "857", "dtype": "float32"}, {"name": "858", "dtype": "float32"}, {"name": "859", "dtype": "float32"}, {"name": "860", "dtype": "float32"}, {"name": "861", "dtype": "float32"}, {"name": 
"862", "dtype": "float32"}, {"name": "863", "dtype": "float32"}, {"name": "864", "dtype": "float32"}, {"name": "865", "dtype": "float32"}, {"name": "866", "dtype": "float32"}, {"name": "867", "dtype": "float32"}, {"name": "868", "dtype": "float32"}, {"name": "869", "dtype": "float32"}, {"name": "870", "dtype": "float32"}, {"name": "871", "dtype": "float32"}, {"name": "872", "dtype": "float32"}, {"name": "873", "dtype": "float32"}, {"name": "874", "dtype": "float32"}, {"name": "875", "dtype": "float32"}, {"name": "876", "dtype": "float32"}, {"name": "877", "dtype": "float32"}, {"name": "878", "dtype": "float32"}, {"name": "879", "dtype": "float32"}, {"name": "880", "dtype": "float32"}, {"name": "881", "dtype": "float32"}, {"name": "882", "dtype": "float32"}, {"name": "883", "dtype": "float32"}, {"name": "884", "dtype": "float32"}, {"name": "885", "dtype": "float32"}, {"name": "886", "dtype": "float32"}, {"name": "887", "dtype": "float32"}, {"name": "888", "dtype": "float32"}, {"name": "889", "dtype": "float32"}, {"name": "890", "dtype": "float32"}, {"name": "891", "dtype": "float32"}, {"name": "892", "dtype": "float32"}, {"name": "893", "dtype": "float32"}, {"name": "894", "dtype": "float32"}, {"name": "895", "dtype": "float32"}, {"name": "896", "dtype": "float32"}, {"name": "897", "dtype": "float32"}, {"name": "898", "dtype": "float32"}, {"name": "899", "dtype": "float32"}, {"name": "900", "dtype": "float32"}, {"name": "901", "dtype": "float32"}, {"name": "902", "dtype": "float32"}, {"name": "903", "dtype": "float32"}, {"name": "904", "dtype": "float32"}, {"name": "905", "dtype": "float32"}, {"name": "906", "dtype": "float32"}, {"name": "907", "dtype": "float32"}, {"name": "908", "dtype": "float32"}, {"name": "909", "dtype": "float32"}, {"name": "910", "dtype": "float32"}, {"name": "911", "dtype": "float32"}, {"name": "912", "dtype": "float32"}, {"name": "913", "dtype": "float32"}, {"name": "914", "dtype": "float32"}, {"name": "915", "dtype": "float32"}, {"name": "916", "dtype": "float32"}, {"name": "917", "dtype": "float32"}, {"name": "918", "dtype": "float32"}, {"name": "919", "dtype": "float32"}, {"name": "920", "dtype": "float32"}, {"name": "921", "dtype": "float32"}, {"name": "922", "dtype": "float32"}, {"name": "923", "dtype": "float32"}, {"name": "924", "dtype": "float32"}, {"name": "925", "dtype": "float32"}, {"name": "926", "dtype": "float32"}, {"name": "927", "dtype": "float32"}, {"name": "928", "dtype": "float32"}, {"name": "929", "dtype": "float32"}, {"name": "930", "dtype": "float32"}, {"name": "931", "dtype": "float32"}, {"name": "932", "dtype": "float32"}, {"name": "933", "dtype": "float32"}, {"name": "934", "dtype": "float32"}, {"name": "935", "dtype": "float32"}, {"name": "936", "dtype": "float32"}, {"name": "937", "dtype": "float32"}, {"name": "938", "dtype": "float32"}, {"name": "939", "dtype": "float32"}, {"name": "940", "dtype": "float32"}, {"name": "941", "dtype": "float32"}, {"name": "942", "dtype": "float32"}, {"name": "943", "dtype": "float32"}, {"name": "944", "dtype": "float32"}, {"name": "945", "dtype": "float32"}, {"name": "946", "dtype": "float32"}, {"name": "947", "dtype": "float32"}, {"name": "948", "dtype": "float32"}, {"name": "949", "dtype": "float32"}, {"name": "950", "dtype": "float32"}, {"name": "951", "dtype": "float32"}, {"name": "952", "dtype": "float32"}, {"name": "953", "dtype": "float32"}, {"name": "954", "dtype": "float32"}, {"name": "955", "dtype": "float32"}, {"name": "956", "dtype": "float32"}, {"name": "957", "dtype": "float32"}, {"name": 
"958", "dtype": "float32"}, {"name": "959", "dtype": "float32"}, {"name": "960", "dtype": "float32"}, {"name": "961", "dtype": "float32"}, {"name": "962", "dtype": "float32"}, {"name": "963", "dtype": "float32"}, {"name": "964", "dtype": "float32"}, {"name": "965", "dtype": "float32"}, {"name": "966", "dtype": "float32"}, {"name": "967", "dtype": "float32"}, {"name": "968", "dtype": "float32"}, {"name": "969", "dtype": "float32"}, {"name": "970", "dtype": "float32"}, {"name": "971", "dtype": "float32"}, {"name": "972", "dtype": "float32"}, {"name": "973", "dtype": "float32"}, {"name": "974", "dtype": "float32"}, {"name": "975", "dtype": "float32"}, {"name": "976", "dtype": "float32"}, {"name": "977", "dtype": "float32"}, {"name": "978", "dtype": "float32"}, {"name": "979", "dtype": "float32"}, {"name": "980", "dtype": "float32"}, {"name": "981", "dtype": "float32"}, {"name": "982", "dtype": "float32"}, {"name": "983", "dtype": "float32"}, {"name": "984", "dtype": "float32"}, {"name": "985", "dtype": "float32"}, {"name": "986", "dtype": "float32"}, {"name": "987", "dtype": "float32"}, {"name": "988", "dtype": "float32"}, {"name": "989", "dtype": "float32"}, {"name": "990", "dtype": "float32"}, {"name": "991", "dtype": "float32"}, {"name": "992", "dtype": "float32"}, {"name": "993", "dtype": "float32"}, {"name": "994", "dtype": "float32"}, {"name": "995", "dtype": "float32"}, {"name": "996", "dtype": "float32"}, {"name": "997", "dtype": "float32"}, {"name": "998", "dtype": "float32"}, {"name": "999", "dtype": "float32"}, {"name": "1000", "dtype": "float32"}, {"name": "1001", "dtype": "float32"}, {"name": "1002", "dtype": "float32"}, {"name": "1003", "dtype": "float32"}, {"name": "1004", "dtype": "float32"}, {"name": "1005", "dtype": "float32"}, {"name": "1006", "dtype": "float32"}, {"name": "1007", "dtype": "float32"}, {"name": "1008", "dtype": "float32"}, {"name": "1009", "dtype": "float32"}, {"name": "1010", "dtype": "float32"}, {"name": "1011", "dtype": "float32"}, {"name": "1012", "dtype": "float32"}, {"name": "1013", "dtype": "float32"}, {"name": "1014", "dtype": "float32"}, {"name": "1015", "dtype": "float32"}, {"name": "1016", "dtype": "float32"}, {"name": "1017", "dtype": "float32"}, {"name": "1018", "dtype": "float32"}, {"name": "1019", "dtype": "float32"}, {"name": "1020", "dtype": "float32"}, {"name": "1021", "dtype": "float32"}, {"name": "1022", "dtype": "float32"}, {"name": "1023", "dtype": "float32"}, {"name": "1024", "dtype": "float32"}, {"name": "1025", "dtype": "float32"}, {"name": "1026", "dtype": "float32"}, {"name": "1027", "dtype": "float32"}, {"name": "1028", "dtype": "float32"}, {"name": "1029", "dtype": "float32"}, {"name": "1030", "dtype": "float32"}, {"name": "1031", "dtype": "float32"}, {"name": "1032", "dtype": "float32"}, {"name": "1033", "dtype": "float32"}, {"name": "1034", "dtype": "float32"}, {"name": "1035", "dtype": "float32"}, {"name": "1036", "dtype": "float32"}, {"name": "1037", "dtype": "float32"}, {"name": "1038", "dtype": "float32"}, {"name": "1039", "dtype": "float32"}, {"name": "1040", "dtype": "float32"}, {"name": "1041", "dtype": "float32"}, {"name": "1042", "dtype": "float32"}, {"name": "1043", "dtype": "float32"}, {"name": "1044", "dtype": "float32"}, {"name": "1045", "dtype": "float32"}, {"name": "1046", "dtype": "float32"}, {"name": "1047", "dtype": "float32"}, {"name": "1048", "dtype": "float32"}, {"name": "1049", "dtype": "float32"}, {"name": "1050", "dtype": "float32"}, {"name": "1051", "dtype": "float32"}, {"name": "1052", "dtype": 
"float32"}, {"name": "1053", "dtype": "float32"}, {"name": "1054", "dtype": "float32"}, {"name": "1055", "dtype": "float32"}, {"name": "1056", "dtype": "float32"}, {"name": "1057", "dtype": "float32"}, {"name": "1058", "dtype": "float32"}, {"name": "1059", "dtype": "float32"}, {"name": "1060", "dtype": "float32"}, {"name": "1061", "dtype": "float32"}, {"name": "1062", "dtype": "float32"}, {"name": "1063", "dtype": "float32"}, {"name": "1064", "dtype": "float32"}, {"name": "1065", "dtype": "float32"}, {"name": "1066", "dtype": "float32"}, {"name": "1067", "dtype": "float32"}, {"name": "1068", "dtype": "float32"}, {"name": "1069", "dtype": "float32"}, {"name": "1070", "dtype": "float32"}, {"name": "1071", "dtype": "float32"}, {"name": "1072", "dtype": "float32"}, {"name": "1073", "dtype": "float32"}, {"name": "1074", "dtype": "float32"}, {"name": "1075", "dtype": "float32"}, {"name": "1076", "dtype": "float32"}, {"name": "1077", "dtype": "float32"}, {"name": "1078", "dtype": "float32"}, {"name": "1079", "dtype": "float32"}, {"name": "1080", "dtype": "float32"}, {"name": "1081", "dtype": "float32"}, {"name": "1082", "dtype": "float32"}, {"name": "1083", "dtype": "float32"}, {"name": "1084", "dtype": "float32"}, {"name": "1085", "dtype": "float32"}, {"name": "1086", "dtype": "float32"}, {"name": "1087", "dtype": "float32"}, {"name": "1088", "dtype": "float32"}, {"name": "1089", "dtype": "float32"}, {"name": "1090", "dtype": "float32"}, {"name": "1091", "dtype": "float32"}, {"name": "1092", "dtype": "float32"}, {"name": "1093", "dtype": "float32"}, {"name": "1094", "dtype": "float32"}, {"name": "1095", "dtype": "float32"}, {"name": "1096", "dtype": "float32"}, {"name": "1097", "dtype": "float32"}, {"name": "1098", "dtype": "float32"}, {"name": "1099", "dtype": "float32"}, {"name": "1100", "dtype": "float32"}, {"name": "1101", "dtype": "float32"}, {"name": "1102", "dtype": "float32"}, {"name": "1103", "dtype": "float32"}, {"name": "1104", "dtype": "float32"}, {"name": "1105", "dtype": "float32"}, {"name": "1106", "dtype": "float32"}, {"name": "1107", "dtype": "float32"}, {"name": "1108", "dtype": "float32"}, {"name": "1109", "dtype": "float32"}, {"name": "1110", "dtype": "float32"}, {"name": "1111", "dtype": "float32"}, {"name": "1112", "dtype": "float32"}, {"name": "1113", "dtype": "float32"}, {"name": "1114", "dtype": "float32"}, {"name": "1115", "dtype": "float32"}, {"name": "1116", "dtype": "float32"}, {"name": "1117", "dtype": "float32"}, {"name": "1118", "dtype": "float32"}, {"name": "1119", "dtype": "float32"}, {"name": "1120", "dtype": "float32"}, {"name": "1121", "dtype": "float32"}, {"name": "1122", "dtype": "float32"}, {"name": "1123", "dtype": "float32"}, {"name": "1124", "dtype": "float32"}, {"name": "1125", "dtype": "float32"}, {"name": "1126", "dtype": "float32"}, {"name": "1127", "dtype": "float32"}, {"name": "1128", "dtype": "float32"}, {"name": "1129", "dtype": "float32"}, {"name": "1130", "dtype": "float32"}, {"name": "1131", "dtype": "float32"}, {"name": "1132", "dtype": "float32"}, {"name": "1133", "dtype": "float32"}, {"name": "1134", "dtype": "float32"}, {"name": "1135", "dtype": "float32"}, {"name": "1136", "dtype": "float32"}, {"name": "1137", "dtype": "float32"}, {"name": "1138", "dtype": "float32"}, {"name": "1139", "dtype": "float32"}, {"name": "1140", "dtype": "float32"}, {"name": "1141", "dtype": "float32"}, {"name": "1142", "dtype": "float32"}, {"name": "1143", "dtype": "float32"}, {"name": "1144", "dtype": "float32"}, {"name": "1145", "dtype": "float32"}, {"name": 
"1146", "dtype": "float32"}, {"name": "1147", "dtype": "float32"}, {"name": "1148", "dtype": "float32"}, {"name": "1149", "dtype": "float32"}, {"name": "1150", "dtype": "float32"}, {"name": "1151", "dtype": "float32"}, {"name": "1152", "dtype": "float32"}, {"name": "1153", "dtype": "float32"}, {"name": "1154", "dtype": "float32"}, {"name": "1155", "dtype": "float32"}, {"name": "1156", "dtype": "float32"}, {"name": "1157", "dtype": "float32"}, {"name": "1158", "dtype": "float32"}, {"name": "1159", "dtype": "float32"}, {"name": "1160", "dtype": "float32"}, {"name": "1161", "dtype": "float32"}, {"name": "1162", "dtype": "float32"}, {"name": "1163", "dtype": "float32"}, {"name": "1164", "dtype": "float32"}, {"name": "1165", "dtype": "float32"}, {"name": "1166", "dtype": "float32"}, {"name": "1167", "dtype": "float32"}, {"name": "1168", "dtype": "float32"}, {"name": "1169", "dtype": "float32"}, {"name": "1170", "dtype": "float32"}, {"name": "1171", "dtype": "float32"}, {"name": "1172", "dtype": "float32"}, {"name": "1173", "dtype": "float32"}, {"name": "1174", "dtype": "float32"}, {"name": "1175", "dtype": "float32"}, {"name": "1176", "dtype": "float32"}, {"name": "1177", "dtype": "float32"}, {"name": "1178", "dtype": "float32"}, {"name": "1179", "dtype": "float32"}, {"name": "1180", "dtype": "float32"}, {"name": "1181", "dtype": "float32"}, {"name": "1182", "dtype": "float32"}, {"name": "1183", "dtype": "float32"}, {"name": "1184", "dtype": "float32"}, {"name": "1185", "dtype": "float32"}, {"name": "1186", "dtype": "float32"}, {"name": "1187", "dtype": "float32"}, {"name": "1188", "dtype": "float32"}, {"name": "1189", "dtype": "float32"}, {"name": "1190", "dtype": "float32"}, {"name": "1191", "dtype": "float32"}, {"name": "1192", "dtype": "float32"}, {"name": "1193", "dtype": "float32"}, {"name": "1194", "dtype": "float32"}, {"name": "1195", "dtype": "float32"}, {"name": "1196", "dtype": "float32"}, {"name": "1197", "dtype": "float32"}, {"name": "1198", "dtype": "float32"}, {"name": "1199", "dtype": "float32"}, {"name": "1200", "dtype": "float32"}, {"name": "1201", "dtype": "float32"}, {"name": "1202", "dtype": "float32"}, {"name": "1203", "dtype": "float32"}, {"name": "1204", "dtype": "float32"}, {"name": "1205", "dtype": "float32"}, {"name": "1206", "dtype": "float32"}, {"name": "1207", "dtype": "float32"}, {"name": "1208", "dtype": "float32"}, {"name": "1209", "dtype": "float32"}, {"name": "1210", "dtype": "float32"}, {"name": "1211", "dtype": "float32"}, {"name": "1212", "dtype": "float32"}, {"name": "1213", "dtype": "float32"}, {"name": "1214", "dtype": "float32"}, {"name": "1215", "dtype": "float32"}, {"name": "1216", "dtype": "float32"}, {"name": "1217", "dtype": "float32"}, {"name": "1218", "dtype": "float32"}, {"name": "1219", "dtype": "float32"}, {"name": "1220", "dtype": "float32"}, {"name": "1221", "dtype": "float32"}, {"name": "1222", "dtype": "float32"}, {"name": "1223", "dtype": "float32"}, {"name": "1224", "dtype": "float32"}, {"name": "1225", "dtype": "float32"}, {"name": "1226", "dtype": "float32"}, {"name": "1227", "dtype": "float32"}, {"name": "1228", "dtype": "float32"}, {"name": "1229", "dtype": "float32"}, {"name": "1230", "dtype": "float32"}, {"name": "1231", "dtype": "float32"}, {"name": "1232", "dtype": "float32"}, {"name": "1233", "dtype": "float32"}, {"name": "1234", "dtype": "float32"}, {"name": "1235", "dtype": "float32"}, {"name": "1236", "dtype": "float32"}, {"name": "1237", "dtype": "float32"}, {"name": "1238", "dtype": "float32"}, {"name": "1239", "dtype": 
"float32"}, {"name": "1240", "dtype": "float32"}, {"name": "1241", "dtype": "float32"}, {"name": "1242", "dtype": "float32"}, {"name": "1243", "dtype": "float32"}, {"name": "1244", "dtype": "float32"}, {"name": "1245", "dtype": "float32"}, {"name": "1246", "dtype": "float32"}, {"name": "1247", "dtype": "float32"}, {"name": "1248", "dtype": "float32"}, {"name": "1249", "dtype": "float32"}, {"name": "1250", "dtype": "float32"}, {"name": "1251", "dtype": "float32"}, {"name": "1252", "dtype": "float32"}, {"name": "1253", "dtype": "float32"}, {"name": "1254", "dtype": "float32"}, {"name": "1255", "dtype": "float32"}, {"name": "1256", "dtype": "float32"}, {"name": "1257", "dtype": "float32"}, {"name": "1258", "dtype": "float32"}, {"name": "1259", "dtype": "float32"}, {"name": "1260", "dtype": "float32"}, {"name": "1261", "dtype": "float32"}, {"name": "1262", "dtype": "float32"}, {"name": "1263", "dtype": "float32"}, {"name": "1264", "dtype": "float32"}, {"name": "1265", "dtype": "float32"}, {"name": "1266", "dtype": "float32"}, {"name": "1267", "dtype": "float32"}, {"name": "1268", "dtype": "float32"}, {"name": "1269", "dtype": "float32"}, {"name": "1270", "dtype": "float32"}, {"name": "1271", "dtype": "float32"}, {"name": "1272", "dtype": "float32"}, {"name": "1273", "dtype": "float32"}, {"name": "1274", "dtype": "float32"}, {"name": "1275", "dtype": "float32"}, {"name": "1276", "dtype": "float32"}, {"name": "1277", "dtype": "float32"}, {"name": "1278", "dtype": "float32"}, {"name": "1279", "dtype": "float32"}, {"name": "1280", "dtype": "float32"}, {"name": "1281", "dtype": "float32"}, {"name": "1282", "dtype": "float32"}, {"name": "1283", "dtype": "float32"}, {"name": "1284", "dtype": "float32"}, {"name": "1285", "dtype": "float32"}, {"name": "1286", "dtype": "float32"}, {"name": "1287", "dtype": "float32"}, {"name": "1288", "dtype": "float32"}, {"name": "1289", "dtype": "float32"}, {"name": "1290", "dtype": "float32"}, {"name": "1291", "dtype": "float32"}, {"name": "1292", "dtype": "float32"}, {"name": "1293", "dtype": "float32"}, {"name": "1294", "dtype": "float32"}, {"name": "1295", "dtype": "float32"}, {"name": "1296", "dtype": "float32"}, {"name": "1297", "dtype": "float32"}, {"name": "1298", "dtype": "float32"}, {"name": "1299", "dtype": "float32"}, {"name": "1300", "dtype": "float32"}, {"name": "1301", "dtype": "float32"}, {"name": "1302", "dtype": "float32"}, {"name": "1303", "dtype": "float32"}, {"name": "1304", "dtype": "float32"}, {"name": "1305", "dtype": "float32"}, {"name": "1306", "dtype": "float32"}, {"name": "1307", "dtype": "float32"}, {"name": "1308", "dtype": "float32"}, {"name": "1309", "dtype": "float32"}, {"name": "1310", "dtype": "float32"}, {"name": "1311", "dtype": "float32"}, {"name": "1312", "dtype": "float32"}, {"name": "1313", "dtype": "float32"}, {"name": "1314", "dtype": "float32"}, {"name": "1315", "dtype": "float32"}, {"name": "1316", "dtype": "float32"}, {"name": "1317", "dtype": "float32"}, {"name": "1318", "dtype": "float32"}, {"name": "1319", "dtype": "float32"}, {"name": "1320", "dtype": "float32"}, {"name": "1321", "dtype": "float32"}, {"name": "1322", "dtype": "float32"}, {"name": "1323", "dtype": "float32"}, {"name": "1324", "dtype": "float32"}, {"name": "1325", "dtype": "float32"}, {"name": "1326", "dtype": "float32"}, {"name": "1327", "dtype": "float32"}, {"name": "1328", "dtype": "float32"}, {"name": "1329", "dtype": "float32"}, {"name": "1330", "dtype": "float32"}, {"name": "1331", "dtype": "float32"}, {"name": "1332", "dtype": "float32"}, {"name": 
"1333", "dtype": "float32"}, {"name": "1334", "dtype": "float32"}, {"name": "1335", "dtype": "float32"}, {"name": "1336", "dtype": "float32"}, {"name": "1337", "dtype": "float32"}, {"name": "1338", "dtype": "float32"}, {"name": "1339", "dtype": "float32"}, {"name": "1340", "dtype": "float32"}, {"name": "1341", "dtype": "float32"}, {"name": "1342", "dtype": "float32"}, {"name": "1343", "dtype": "float32"}, {"name": "1344", "dtype": "float32"}, {"name": "1345", "dtype": "float32"}, {"name": "1346", "dtype": "float32"}, {"name": "1347", "dtype": "float32"}, {"name": "1348", "dtype": "float32"}, {"name": "1349", "dtype": "float32"}, {"name": "1350", "dtype": "float32"}, {"name": "1351", "dtype": "float32"}, {"name": "1352", "dtype": "float32"}, {"name": "1353", "dtype": "float32"}, {"name": "1354", "dtype": "float32"}, {"name": "1355", "dtype": "float32"}, {"name": "1356", "dtype": "float32"}, {"name": "1357", "dtype": "float32"}, {"name": "1358", "dtype": "float32"}, {"name": "1359", "dtype": "float32"}, {"name": "1360", "dtype": "float32"}, {"name": "1361", "dtype": "float32"}, {"name": "1362", "dtype": "float32"}, {"name": "1363", "dtype": "float32"}, {"name": "1364", "dtype": "float32"}, {"name": "1365", "dtype": "float32"}, {"name": "1366", "dtype": "float32"}, {"name": "1367", "dtype": "float32"}, {"name": "1368", "dtype": "float32"}, {"name": "1369", "dtype": "float32"}, {"name": "1370", "dtype": "float32"}, {"name": "1371", "dtype": "float32"}, {"name": "1372", "dtype": "float32"}, {"name": "1373", "dtype": "float32"}, {"name": "1374", "dtype": "float32"}, {"name": "1375", "dtype": "float32"}, {"name": "1376", "dtype": "float32"}, {"name": "1377", "dtype": "float32"}, {"name": "1378", "dtype": "float32"}, {"name": "1379", "dtype": "float32"}, {"name": "1380", "dtype": "float32"}, {"name": "1381", "dtype": "float32"}, {"name": "1382", "dtype": "float32"}, {"name": "1383", "dtype": "float32"}, {"name": "1384", "dtype": "float32"}, {"name": "1385", "dtype": "float32"}, {"name": "1386", "dtype": "float32"}, {"name": "1387", "dtype": "float32"}, {"name": "1388", "dtype": "float32"}, {"name": "1389", "dtype": "float32"}, {"name": "1390", "dtype": "float32"}, {"name": "1391", "dtype": "float32"}, {"name": "1392", "dtype": "float32"}, {"name": "1393", "dtype": "float32"}, {"name": "1394", "dtype": "float32"}, {"name": "1395", "dtype": "float32"}, {"name": "1396", "dtype": "float32"}, {"name": "1397", "dtype": "float32"}, {"name": "1398", "dtype": "float32"}, {"name": "1399", "dtype": "float32"}, {"name": "1400", "dtype": "float32"}, {"name": "1401", "dtype": "float32"}, {"name": "1402", "dtype": "float32"}, {"name": "1403", "dtype": "float32"}, {"name": "1404", "dtype": "float32"}, {"name": "1405", "dtype": "float32"}, {"name": "1406", "dtype": "float32"}, {"name": "1407", "dtype": "float32"}, {"name": "1408", "dtype": "float32"}, {"name": "1409", "dtype": "float32"}, {"name": "1410", "dtype": "float32"}, {"name": "1411", "dtype": "float32"}, {"name": "1412", "dtype": "float32"}, {"name": "1413", "dtype": "float32"}, {"name": "1414", "dtype": "float32"}, {"name": "1415", "dtype": "float32"}, {"name": "1416", "dtype": "float32"}, {"name": "1417", "dtype": "float32"}, {"name": "1418", "dtype": "float32"}, {"name": "1419", "dtype": "float32"}, {"name": "1420", "dtype": "float32"}, {"name": "1421", "dtype": "float32"}, {"name": "1422", "dtype": "float32"}, {"name": "1423", "dtype": "float32"}, {"name": "1424", "dtype": "float32"}, {"name": "1425", "dtype": "float32"}, {"name": "1426", "dtype": 
"float32"}, {"name": "1427", "dtype": "float32"}, {"name": "1428", "dtype": "float32"}, {"name": "1429", "dtype": "float32"}, {"name": "1430", "dtype": "float32"}, {"name": "1431", "dtype": "float32"}, {"name": "1432", "dtype": "float32"}, {"name": "1433", "dtype": "float32"}, {"name": "1434", "dtype": "float32"}, {"name": "1435", "dtype": "float32"}, {"name": "1436", "dtype": "float32"}, {"name": "1437", "dtype": "float32"}, {"name": "1438", "dtype": "float32"}, {"name": "1439", "dtype": "float32"}, {"name": "1440", "dtype": "float32"}, {"name": "1441", "dtype": "float32"}, {"name": "1442", "dtype": "float32"}, {"name": "1443", "dtype": "float32"}, {"name": "1444", "dtype": "float32"}, {"name": "1445", "dtype": "float32"}, {"name": "1446", "dtype": "float32"}, {"name": "1447", "dtype": "float32"}, {"name": "1448", "dtype": "float32"}, {"name": "1449", "dtype": "float32"}, {"name": "1450", "dtype": "float32"}, {"name": "1451", "dtype": "float32"}, {"name": "1452", "dtype": "float32"}, {"name": "1453", "dtype": "float32"}, {"name": "1454", "dtype": "float32"}, {"name": "1455", "dtype": "float32"}, {"name": "1456", "dtype": "float32"}, {"name": "1457", "dtype": "float32"}, {"name": "1458", "dtype": "float32"}, {"name": "1459", "dtype": "float32"}, {"name": "1460", "dtype": "float32"}, {"name": "1461", "dtype": "float32"}, {"name": "1462", "dtype": "float32"}, {"name": "1463", "dtype": "float32"}, {"name": "1464", "dtype": "float32"}, {"name": "1465", "dtype": "float32"}, {"name": "1466", "dtype": "float32"}, {"name": "1467", "dtype": "float32"}, {"name": "1468", "dtype": "float32"}, {"name": "1469", "dtype": "float32"}, {"name": "1470", "dtype": "float32"}, {"name": "1471", "dtype": "float32"}, {"name": "1472", "dtype": "float32"}, {"name": "1473", "dtype": "float32"}, {"name": "1474", "dtype": "float32"}, {"name": "1475", "dtype": "float32"}, {"name": "1476", "dtype": "float32"}, {"name": "1477", "dtype": "float32"}, {"name": "1478", "dtype": "float32"}, {"name": "1479", "dtype": "float32"}, {"name": "1480", "dtype": "float32"}, {"name": "1481", "dtype": "float32"}, {"name": "1482", "dtype": "float32"}, {"name": "1483", "dtype": "float32"}, {"name": "1484", "dtype": "float32"}, {"name": "1485", "dtype": "float32"}, {"name": "1486", "dtype": "float32"}, {"name": "1487", "dtype": "float32"}, {"name": "1488", "dtype": "float32"}, {"name": "1489", "dtype": "float32"}, {"name": "1490", "dtype": "float32"}, {"name": "1491", "dtype": "float32"}, {"name": "1492", "dtype": "float32"}, {"name": "1493", "dtype": "float32"}, {"name": "1494", "dtype": "float32"}, {"name": "1495", "dtype": "float32"}, {"name": "1496", "dtype": "float32"}, {"name": "1497", "dtype": "float32"}, {"name": "1498", "dtype": "float32"}, {"name": "1499", "dtype": "float32"}, {"name": "1500", "dtype": "float32"}, {"name": "1501", "dtype": "float32"}, {"name": "1502", "dtype": "float32"}, {"name": "1503", "dtype": "float32"}, {"name": "1504", "dtype": "float32"}, {"name": "1505", "dtype": "float32"}, {"name": "1506", "dtype": "float32"}, {"name": "1507", "dtype": "float32"}, {"name": "1508", "dtype": "float32"}, {"name": "1509", "dtype": "float32"}, {"name": "1510", "dtype": "float32"}, {"name": "1511", "dtype": "float32"}, {"name": "1512", "dtype": "float32"}, {"name": "1513", "dtype": "float32"}, {"name": "1514", "dtype": "float32"}, {"name": "1515", "dtype": "float32"}, {"name": "1516", "dtype": "float32"}, {"name": "1517", "dtype": "float32"}, {"name": "1518", "dtype": "float32"}, {"name": "1519", "dtype": "float32"}, {"name": 
"1520", "dtype": "float32"}, {"name": "1521", "dtype": "float32"}, {"name": "1522", "dtype": "float32"}, {"name": "1523", "dtype": "float32"}, {"name": "1524", "dtype": "float32"}, {"name": "1525", "dtype": "float32"}, {"name": "1526", "dtype": "float32"}, {"name": "1527", "dtype": "float32"}, {"name": "1528", "dtype": "float32"}, {"name": "1529", "dtype": "float32"}, {"name": "1530", "dtype": "float32"}, {"name": "1531", "dtype": "float32"}, {"name": "1532", "dtype": "float32"}, {"name": "1533", "dtype": "float32"}, {"name": "1534", "dtype": "float32"}, {"name": "1535", "dtype": "float32"}, {"name": "1536", "dtype": "float32"}, {"name": "1537", "dtype": "float32"}, {"name": "1538", "dtype": "float32"}, {"name": "1539", "dtype": "float32"}, {"name": "1540", "dtype": "float32"}, {"name": "1541", "dtype": "float32"}, {"name": "1542", "dtype": "float32"}, {"name": "1543", "dtype": "float32"}, {"name": "1544", "dtype": "float32"}, {"name": "1545", "dtype": "float32"}, {"name": "1546", "dtype": "float32"}, {"name": "1547", "dtype": "float32"}, {"name": "1548", "dtype": "float32"}, {"name": "1549", "dtype": "float32"}, {"name": "1550", "dtype": "float32"}, {"name": "1551", "dtype": "float32"}, {"name": "1552", "dtype": "float32"}, {"name": "1553", "dtype": "float32"}, {"name": "1554", "dtype": "float32"}, {"name": "1555", "dtype": "float32"}, {"name": "1556", "dtype": "float32"}, {"name": "1557", "dtype": "float32"}, {"name": "1558", "dtype": "float32"}, {"name": "1559", "dtype": "float32"}, {"name": "1560", "dtype": "float32"}, {"name": "1561", "dtype": "float32"}, {"name": "1562", "dtype": "float32"}, {"name": "1563", "dtype": "float32"}, {"name": "1564", "dtype": "float32"}, {"name": "1565", "dtype": "float32"}, {"name": "1566", "dtype": "float32"}, {"name": "1567", "dtype": "float32"}, {"name": "1568", "dtype": "float32"}, {"name": "1569", "dtype": "float32"}, {"name": "1570", "dtype": "float32"}, {"name": "1571", "dtype": "float32"}, {"name": "1572", "dtype": "float32"}, {"name": "1573", "dtype": "float32"}, {"name": "1574", "dtype": "float32"}, {"name": "1575", "dtype": "float32"}, {"name": "1576", "dtype": "float32"}, {"name": "1577", "dtype": "float32"}, {"name": "1578", "dtype": "float32"}, {"name": "1579", "dtype": "float32"}, {"name": "1580", "dtype": "float32"}, {"name": "1581", "dtype": "float32"}, {"name": "1582", "dtype": "float32"}, {"name": "1583", "dtype": "float32"}, {"name": "1584", "dtype": "float32"}, {"name": "1585", "dtype": "float32"}, {"name": "1586", "dtype": "float32"}, {"name": "1587", "dtype": "float32"}, {"name": "1588", "dtype": "float32"}, {"name": "1589", "dtype": "float32"}, {"name": "1590", "dtype": "float32"}, {"name": "1591", "dtype": "float32"}, {"name": "1592", "dtype": "float32"}, {"name": "1593", "dtype": "float32"}, {"name": "1594", "dtype": "float32"}, {"name": "1595", "dtype": "float32"}, {"name": "1596", "dtype": "float32"}, {"name": "1597", "dtype": "float32"}, {"name": "1598", "dtype": "float32"}, {"name": "1599", "dtype": "float32"}, {"name": "1600", "dtype": "float32"}, {"name": "1601", "dtype": "float32"}, {"name": "1602", "dtype": "float32"}, {"name": "1603", "dtype": "float32"}, {"name": "1604", "dtype": "float32"}, {"name": "1605", "dtype": "float32"}, {"name": "1606", "dtype": "float32"}, {"name": "1607", "dtype": "float32"}, {"name": "1608", "dtype": "float32"}, {"name": "1609", "dtype": "float32"}, {"name": "1610", "dtype": "float32"}, {"name": "1611", "dtype": "float32"}, {"name": "1612", "dtype": "float32"}, {"name": "1613", "dtype": 
"float32"}, {"name": "1614", "dtype": "float32"}, {"name": "1615", "dtype": "float32"}, {"name": "1616", "dtype": "float32"}, {"name": "1617", "dtype": "float32"}, {"name": "1618", "dtype": "float32"}, {"name": "1619", "dtype": "float32"}, {"name": "1620", "dtype": "float32"}, {"name": "1621", "dtype": "float32"}, {"name": "1622", "dtype": "float32"}, {"name": "1623", "dtype": "float32"}, {"name": "1624", "dtype": "float32"}, {"name": "1625", "dtype": "float32"}, {"name": "1626", "dtype": "float32"}, {"name": "1627", "dtype": "float32"}, {"name": "1628", "dtype": "float32"}, {"name": "1629", "dtype": "float32"}, {"name": "1630", "dtype": "float32"}, {"name": "1631", "dtype": "float32"}, {"name": "1632", "dtype": "float32"}, {"name": "1633", "dtype": "float32"}, {"name": "1634", "dtype": "float32"}, {"name": "1635", "dtype": "float32"}, {"name": "1636", "dtype": "float32"}, {"name": "1637", "dtype": "float32"}, {"name": "1638", "dtype": "float32"}, {"name": "1639", "dtype": "float32"}, {"name": "1640", "dtype": "float32"}, {"name": "1641", "dtype": "float32"}, {"name": "1642", "dtype": "float32"}, {"name": "1643", "dtype": "float32"}, {"name": "1644", "dtype": "float32"}, {"name": "1645", "dtype": "float32"}, {"name": "1646", "dtype": "float32"}, {"name": "1647", "dtype": "float32"}, {"name": "1648", "dtype": "float32"}, {"name": "1649", "dtype": "float32"}, {"name": "1650", "dtype": "float32"}, {"name": "1651", "dtype": "float32"}, {"name": "1652", "dtype": "float32"}, {"name": "1653", "dtype": "float32"}, {"name": "1654", "dtype": "float32"}, {"name": "1655", "dtype": "float32"}, {"name": "1656", "dtype": "float32"}, {"name": "1657", "dtype": "float32"}, {"name": "1658", "dtype": "float32"}, {"name": "1659", "dtype": "float32"}, {"name": "1660", "dtype": "float32"}, {"name": "1661", "dtype": "float32"}, {"name": "1662", "dtype": "float32"}, {"name": "1663", "dtype": "float32"}, {"name": "1664", "dtype": "float32"}, {"name": "1665", "dtype": "float32"}, {"name": "1666", "dtype": "float32"}, {"name": "1667", "dtype": "float32"}, {"name": "1668", "dtype": "float32"}, {"name": "1669", "dtype": "float32"}, {"name": "1670", "dtype": "float32"}, {"name": "1671", "dtype": "float32"}, {"name": "1672", "dtype": "float32"}, {"name": "1673", "dtype": "float32"}, {"name": "1674", "dtype": "float32"}, {"name": "1675", "dtype": "float32"}, {"name": "1676", "dtype": "float32"}, {"name": "1677", "dtype": "float32"}, {"name": "1678", "dtype": "float32"}, {"name": "1679", "dtype": "float32"}, {"name": "1680", "dtype": "float32"}, {"name": "1681", "dtype": "float32"}, {"name": "1682", "dtype": "float32"}, {"name": "1683", "dtype": "float32"}, {"name": "1684", "dtype": "float32"}, {"name": "1685", "dtype": "float32"}, {"name": "1686", "dtype": "float32"}, {"name": "1687", "dtype": "float32"}, {"name": "1688", "dtype": "float32"}, {"name": "1689", "dtype": "float32"}, {"name": "1690", "dtype": "float32"}, {"name": "1691", "dtype": "float32"}, {"name": "1692", "dtype": "float32"}, {"name": "1693", "dtype": "float32"}, {"name": "1694", "dtype": "float32"}, {"name": "1695", "dtype": "float32"}, {"name": "1696", "dtype": "float32"}, {"name": "1697", "dtype": "float32"}, {"name": "1698", "dtype": "float32"}, {"name": "1699", "dtype": "float32"}, {"name": "1700", "dtype": "float32"}, {"name": "1701", "dtype": "float32"}, {"name": "1702", "dtype": "float32"}, {"name": "1703", "dtype": "float32"}, {"name": "1704", "dtype": "float32"}, {"name": "1705", "dtype": "float32"}, {"name": "1706", "dtype": "float32"}, {"name": 
"1707", "dtype": "float32"}, {"name": "1708", "dtype": "float32"}, {"name": "1709", "dtype": "float32"}, {"name": "1710", "dtype": "float32"}, {"name": "1711", "dtype": "float32"}, {"name": "1712", "dtype": "float32"}, {"name": "1713", "dtype": "float32"}, {"name": "1714", "dtype": "float32"}, {"name": "1715", "dtype": "float32"}, {"name": "1716", "dtype": "float32"}, {"name": "1717", "dtype": "float32"}, {"name": "1718", "dtype": "float32"}, {"name": "1719", "dtype": "float32"}, {"name": "1720", "dtype": "float32"}, {"name": "1721", "dtype": "float32"}, {"name": "1722", "dtype": "float32"}, {"name": "1723", "dtype": "float32"}, {"name": "1724", "dtype": "float32"}, {"name": "1725", "dtype": "float32"}, {"name": "1726", "dtype": "float32"}, {"name": "1727", "dtype": "float32"}, {"name": "1728", "dtype": "float32"}, {"name": "1729", "dtype": "float32"}, {"name": "1730", "dtype": "float32"}, {"name": "1731", "dtype": "float32"}, {"name": "1732", "dtype": "float32"}, {"name": "1733", "dtype": "float32"}, {"name": "1734", "dtype": "float32"}, {"name": "1735", "dtype": "float32"}, {"name": "1736", "dtype": "float32"}, {"name": "1737", "dtype": "float32"}, {"name": "1738", "dtype": "float32"}, {"name": "1739", "dtype": "float32"}, {"name": "1740", "dtype": "float32"}, {"name": "1741", "dtype": "float32"}, {"name": "1742", "dtype": "float32"}, {"name": "1743", "dtype": "float32"}, {"name": "1744", "dtype": "float32"}, {"name": "1745", "dtype": "float32"}, {"name": "1746", "dtype": "float32"}, {"name": "1747", "dtype": "float32"}, {"name": "1748", "dtype": "float32"}, {"name": "1749", "dtype": "float32"}, {"name": "1750", "dtype": "float32"}, {"name": "1751", "dtype": "float32"}, {"name": "1752", "dtype": "float32"}, {"name": "1753", "dtype": "float32"}, {"name": "1754", "dtype": "float32"}, {"name": "1755", "dtype": "float32"}, {"name": "1756", "dtype": "float32"}, {"name": "1757", "dtype": "float32"}, {"name": "1758", "dtype": "float32"}, {"name": "1759", "dtype": "float32"}, {"name": "1760", "dtype": "float32"}, {"name": "1761", "dtype": "float32"}, {"name": "1762", "dtype": "float32"}, {"name": "1763", "dtype": "float32"}, {"name": "1764", "dtype": "float32"}, {"name": "1765", "dtype": "float32"}, {"name": "1766", "dtype": "float32"}, {"name": "1767", "dtype": "float32"}, {"name": "1768", "dtype": "float32"}, {"name": "1769", "dtype": "float32"}, {"name": "1770", "dtype": "float32"}, {"name": "1771", "dtype": "float32"}, {"name": "1772", "dtype": "float32"}, {"name": "1773", "dtype": "float32"}, {"name": "1774", "dtype": "float32"}, {"name": "1775", "dtype": "float32"}, {"name": "1776", "dtype": "float32"}, {"name": "1777", "dtype": "float32"}, {"name": "1778", "dtype": "float32"}, {"name": "1779", "dtype": "float32"}, {"name": "1780", "dtype": "float32"}, {"name": "1781", "dtype": "float32"}, {"name": "1782", "dtype": "float32"}, {"name": "1783", "dtype": "float32"}, {"name": "1784", "dtype": "float32"}, {"name": "1785", "dtype": "float32"}, {"name": "1786", "dtype": "float32"}, {"name": "1787", "dtype": "float32"}, {"name": "1788", "dtype": "float32"}, {"name": "1789", "dtype": "float32"}, {"name": "1790", "dtype": "float32"}, {"name": "1791", "dtype": "float32"}, {"name": "1792", "dtype": "float32"}, {"name": "1793", "dtype": "float32"}, {"name": "1794", "dtype": "float32"}, {"name": "1795", "dtype": "float32"}, {"name": "1796", "dtype": "float32"}, {"name": "1797", "dtype": "float32"}, {"name": "1798", "dtype": "float32"}, {"name": "1799", "dtype": "float32"}, {"name": "1800", "dtype": 
"float32"}, {"name": "1801", "dtype": "float32"}, {"name": "1802", "dtype": "float32"}, {"name": "1803", "dtype": "float32"}, {"name": "1804", "dtype": "float32"}, {"name": "1805", "dtype": "float32"}, {"name": "1806", "dtype": "float32"}, {"name": "1807", "dtype": "float32"}, {"name": "1808", "dtype": "float32"}, {"name": "1809", "dtype": "float32"}, {"name": "1810", "dtype": "float32"}, {"name": "1811", "dtype": "float32"}, {"name": "1812", "dtype": "float32"}, {"name": "1813", "dtype": "float32"}, {"name": "1814", "dtype": "float32"}, {"name": "1815", "dtype": "float32"}, {"name": "1816", "dtype": "float32"}, {"name": "1817", "dtype": "float32"}, {"name": "1818", "dtype": "float32"}, {"name": "1819", "dtype": "float32"}, {"name": "1820", "dtype": "float32"}, {"name": "1821", "dtype": "float32"}, {"name": "1822", "dtype": "float32"}, {"name": "1823", "dtype": "float32"}, {"name": "1824", "dtype": "float32"}, {"name": "1825", "dtype": "float32"}, {"name": "1826", "dtype": "float32"}, {"name": "1827", "dtype": "float32"}, {"name": "1828", "dtype": "float32"}, {"name": "1829", "dtype": "float32"}, {"name": "1830", "dtype": "float32"}, {"name": "1831", "dtype": "float32"}, {"name": "1832", "dtype": "float32"}, {"name": "1833", "dtype": "float32"}, {"name": "1834", "dtype": "float32"}, {"name": "1835", "dtype": "float32"}, {"name": "1836", "dtype": "float32"}, {"name": "1837", "dtype": "float32"}, {"name": "1838", "dtype": "float32"}, {"name": "1839", "dtype": "float32"}, {"name": "1840", "dtype": "float32"}, {"name": "1841", "dtype": "float32"}, {"name": "1842", "dtype": "float32"}, {"name": "1843", "dtype": "float32"}, {"name": "1844", "dtype": "float32"}, {"name": "1845", "dtype": "float32"}, {"name": "1846", "dtype": "float32"}, {"name": "1847", "dtype": "float32"}, {"name": "1848", "dtype": "float32"}, {"name": "1849", "dtype": "float32"}, {"name": "1850", "dtype": "float32"}, {"name": "1851", "dtype": "float32"}, {"name": "1852", "dtype": "float32"}, {"name": "1853", "dtype": "float32"}, {"name": "1854", "dtype": "float32"}, {"name": "1855", "dtype": "float32"}, {"name": "1856", "dtype": "float32"}, {"name": "1857", "dtype": "float32"}, {"name": "1858", "dtype": "float32"}, {"name": "1859", "dtype": "float32"}, {"name": "1860", "dtype": "float32"}, {"name": "1861", "dtype": "float32"}, {"name": "1862", "dtype": "float32"}, {"name": "1863", "dtype": "float32"}, {"name": "1864", "dtype": "float32"}, {"name": "1865", "dtype": "float32"}, {"name": "1866", "dtype": "float32"}, {"name": "1867", "dtype": "float32"}, {"name": "1868", "dtype": "float32"}, {"name": "1869", "dtype": "float32"}, {"name": "1870", "dtype": "float32"}, {"name": "1871", "dtype": "float32"}, {"name": "1872", "dtype": "float32"}, {"name": "1873", "dtype": "float32"}, {"name": "1874", "dtype": "float32"}, {"name": "1875", "dtype": "float32"}, {"name": "1876", "dtype": "float32"}, {"name": "1877", "dtype": "float32"}, {"name": "1878", "dtype": "float32"}, {"name": "1879", "dtype": "float32"}, {"name": "1880", "dtype": "float32"}, {"name": "1881", "dtype": "float32"}, {"name": "1882", "dtype": "float32"}, {"name": "1883", "dtype": "float32"}, {"name": "1884", "dtype": "float32"}, {"name": "1885", "dtype": "float32"}, {"name": "1886", "dtype": "float32"}, {"name": "1887", "dtype": "float32"}, {"name": "1888", "dtype": "float32"}, {"name": "1889", "dtype": "float32"}, {"name": "1890", "dtype": "float32"}, {"name": "1891", "dtype": "float32"}, {"name": "1892", "dtype": "float32"}, {"name": "1893", "dtype": "float32"}, {"name": 
"1894", "dtype": "float32"}, {"name": "1895", "dtype": "float32"}, {"name": "1896", "dtype": "float32"}, {"name": "1897", "dtype": "float32"}, {"name": "1898", "dtype": "float32"}, {"name": "1899", "dtype": "float32"}, {"name": "1900", "dtype": "float32"}, {"name": "1901", "dtype": "float32"}, {"name": "1902", "dtype": "float32"}, {"name": "1903", "dtype": "float32"}, {"name": "1904", "dtype": "float32"}, {"name": "1905", "dtype": "float32"}, {"name": "1906", "dtype": "float32"}, {"name": "1907", "dtype": "float32"}, {"name": "1908", "dtype": "float32"}, {"name": "1909", "dtype": "float32"}, {"name": "1910", "dtype": "float32"}, {"name": "1911", "dtype": "float32"}, {"name": "1912", "dtype": "float32"}, {"name": "1913", "dtype": "float32"}, {"name": "1914", "dtype": "float32"}, {"name": "1915", "dtype": "float32"}, {"name": "1916", "dtype": "float32"}, {"name": "1917", "dtype": "float32"}, {"name": "1918", "dtype": "float32"}, {"name": "1919", "dtype": "float32"}, {"name": "1920", "dtype": "float32"}, {"name": "1921", "dtype": "float32"}, {"name": "1922", "dtype": "float32"}, {"name": "1923", "dtype": "float32"}, {"name": "1924", "dtype": "float32"}, {"name": "1925", "dtype": "float32"}, {"name": "1926", "dtype": "float32"}, {"name": "1927", "dtype": "float32"}, {"name": "1928", "dtype": "float32"}, {"name": "1929", "dtype": "float32"}, {"name": "1930", "dtype": "float32"}, {"name": "1931", "dtype": "float32"}, {"name": "1932", "dtype": "float32"}, {"name": "1933", "dtype": "float32"}, {"name": "1934", "dtype": "float32"}, {"name": "1935", "dtype": "float32"}, {"name": "1936", "dtype": "float32"}, {"name": "1937", "dtype": "float32"}, {"name": "1938", "dtype": "float32"}, {"name": "1939", "dtype": "float32"}, {"name": "1940", "dtype": "float32"}, {"name": "1941", "dtype": "float32"}, {"name": "1942", "dtype": "float32"}, {"name": "1943", "dtype": "float32"}, {"name": "1944", "dtype": "float32"}, {"name": "1945", "dtype": "float32"}, {"name": "1946", "dtype": "float32"}, {"name": "1947", "dtype": "float32"}, {"name": "1948", "dtype": "float32"}, {"name": "1949", "dtype": "float32"}, {"name": "1950", "dtype": "float32"}, {"name": "1951", "dtype": "float32"}, {"name": "1952", "dtype": "float32"}, {"name": "1953", "dtype": "float32"}, {"name": "1954", "dtype": "float32"}, {"name": "1955", "dtype": "float32"}, {"name": "1956", "dtype": "float32"}, {"name": "1957", "dtype": "float32"}, {"name": "1958", "dtype": "float32"}, {"name": "1959", "dtype": "float32"}, {"name": "1960", "dtype": "float32"}, {"name": "1961", "dtype": "float32"}, {"name": "1962", "dtype": "float32"}, {"name": "1963", "dtype": "float32"}, {"name": "1964", "dtype": "float32"}, {"name": "1965", "dtype": "float32"}, {"name": "1966", "dtype": "float32"}, {"name": "1967", "dtype": "float32"}, {"name": "1968", "dtype": "float32"}, {"name": "1969", "dtype": "float32"}, {"name": "1970", "dtype": "float32"}, {"name": "1971", "dtype": "float32"}, {"name": "1972", "dtype": "float32"}, {"name": "1973", "dtype": "float32"}, {"name": "1974", "dtype": "float32"}, {"name": "1975", "dtype": "float32"}, {"name": "1976", "dtype": "float32"}, {"name": "1977", "dtype": "float32"}, {"name": "1978", "dtype": "float32"}, {"name": "1979", "dtype": "float32"}, {"name": "1980", "dtype": "float32"}, {"name": "1981", "dtype": "float32"}, {"name": "1982", "dtype": "float32"}, {"name": "1983", "dtype": "float32"}, {"name": "1984", "dtype": "float32"}, {"name": "1985", "dtype": "float32"}, {"name": "1986", "dtype": "float32"}, {"name": "1987", "dtype": 
"float32"}, {"name": "1988", "dtype": "float32"}, {"name": "1989", "dtype": "float32"}, {"name": "1990", "dtype": "float32"}, {"name": "1991", "dtype": "float32"}, {"name": "1992", "dtype": "float32"}, {"name": "1993", "dtype": "float32"}, {"name": "1994", "dtype": "float32"}, {"name": "1995", "dtype": "float32"}, {"name": "1996", "dtype": "float32"}, {"name": "1997", "dtype": "float32"}, {"name": "1998", "dtype": "float32"}, {"name": "1999", "dtype": "float32"}, {"name": "2000", "dtype": "float32"}, {"name": "2001", "dtype": "float32"}, {"name": "2002", "dtype": "float32"}, {"name": "2003", "dtype": "float32"}, {"name": "2004", "dtype": "float32"}, {"name": "2005", "dtype": "float32"}, {"name": "2006", "dtype": "float32"}, {"name": "2007", "dtype": "float32"}, {"name": "2008", "dtype": "float32"}, {"name": "2009", "dtype": "float32"}, {"name": "2010", "dtype": "float32"}, {"name": "2011", "dtype": "float32"}, {"name": "2012", "dtype": "float32"}, {"name": "2013", "dtype": "float32"}, {"name": "2014", "dtype": "float32"}, {"name": "2015", "dtype": "float32"}, {"name": "2016", "dtype": "float32"}, {"name": "2017", "dtype": "float32"}, {"name": "2018", "dtype": "float32"}, {"name": "2019", "dtype": "float32"}, {"name": "2020", "dtype": "float32"}, {"name": "2021", "dtype": "float32"}, {"name": "2022", "dtype": "float32"}, {"name": "2023", "dtype": "float32"}, {"name": "2024", "dtype": "float32"}, {"name": "2025", "dtype": "float32"}, {"name": "2026", "dtype": "float32"}, {"name": "2027", "dtype": "float32"}, {"name": "2028", "dtype": "float32"}, {"name": "2029", "dtype": "float32"}, {"name": "2030", "dtype": "float32"}, {"name": "2031", "dtype": "float32"}, {"name": "2032", "dtype": "float32"}, {"name": "2033", "dtype": "float32"}, {"name": "2034", "dtype": "float32"}, {"name": "2035", "dtype": "float32"}, {"name": "2036", "dtype": "float32"}, {"name": "2037", "dtype": "float32"}, {"name": "2038", "dtype": "float32"}, {"name": "2039", "dtype": "float32"}, {"name": "2040", "dtype": "float32"}, {"name": "2041", "dtype": "float32"}, {"name": "2042", "dtype": "float32"}, {"name": "2043", "dtype": "float32"}, {"name": "2044", "dtype": "float32"}, {"name": "2045", "dtype": "float32"}, {"name": "2046", "dtype": "float32"}, {"name": "2047", "dtype": "float32"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 307582709.0625, "num_examples": 37500}, {"name": "test", "num_bytes": 102527570.0, "num_examples": 12500}], "download_size": 565394003, "dataset_size": 410110279.0625}}
2023-08-23T05:56:42+00:00
[]
[]
TAGS #region-us
# Dataset Card for "BGL_GPTNEO_Finetuned" More Information needed
[ "# Dataset Card for \"BGL_GPTNEO_Finetuned\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"BGL_GPTNEO_Finetuned\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"BGL_GPTNEO_Finetuned\"\n\nMore Information needed" ]
c488a123ca76e894d6f020ed2cea33ae327a74de
# Dataset of lunasa_prismriver/ルナサ・プリズムリバー/루나사프리즘리버 (Touhou)

This is the dataset of lunasa_prismriver/ルナサ・プリズムリバー/루나사프리즘리버 (Touhou), containing 500 images and their tags.

The core tags of this character are `blonde_hair, short_hair, hat, yellow_eyes`, which are pruned in this dataset.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)).

## List of Packages

| Name             | Images | Size       | Download                                                                                                                   | Type       | Description                                                           |
|:-----------------|-------:|:-----------|:---------------------------------------------------------------------------------------------------------------------------|:-----------|:----------------------------------------------------------------------|
| raw              | 500    | 405.30 MiB | [Download](https://huggingface.co/datasets/CyberHarem/lunasa_prismriver_touhou/resolve/main/dataset-raw.zip)               | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger).  |
| 800              | 500    | 298.93 MiB | [Download](https://huggingface.co/datasets/CyberHarem/lunasa_prismriver_touhou/resolve/main/dataset-800.zip)               | IMG+TXT    | dataset with the shorter side not exceeding 800 pixels.               |
| stage3-p480-800  | 1057   | 590.05 MiB | [Download](https://huggingface.co/datasets/CyberHarem/lunasa_prismriver_touhou/resolve/main/dataset-stage3-p480-800.zip)   | IMG+TXT    | 3-stage cropped dataset with the area not less than 480x480 pixels.   |
| 1200             | 500    | 385.73 MiB | [Download](https://huggingface.co/datasets/CyberHarem/lunasa_prismriver_touhou/resolve/main/dataset-1200.zip)              | IMG+TXT    | dataset with the shorter side not exceeding 1200 pixels.              |
| stage3-p480-1200 | 1057   | 722.76 MiB | [Download](https://huggingface.co/datasets/CyberHarem/lunasa_prismriver_touhou/resolve/main/dataset-stage3-p480-1200.zip)  | IMG+TXT    | 3-stage cropped dataset with the area not less than 480x480 pixels.   |

### Load Raw Dataset with Waifuc

We provide the raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code:

```python
import os
import zipfile

from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource

# download raw archive file
zip_file = hf_hub_download(
    repo_id='CyberHarem/lunasa_prismriver_touhou',
    repo_type='dataset',
    filename='dataset-raw.zip',
)

# extract files to your directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)

# load the dataset with waifuc
source = LocalSource(dataset_dir)
for item in source:
    print(item.image, item.meta['filename'], item.meta['tags'])
```

## List of Clusters

List of tag clustering results; some outfits may be mined here.
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:----------------------------------|:----------------------------------|:----------------------------------|:----------------------------------|:----------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 8 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, bangs, black_headwear, black_skirt, black_vest, long_sleeves, looking_at_viewer, simple_background, solo, violin, white_background, white_shirt, closed_mouth, bow_(music), crescent, holding_instrument, standing, hat_ornament, frilled_hat, full_body | | 1 | 16 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, black_headwear, black_skirt, black_vest, full_body, long_sleeves, solo, violin, black_footwear, white_shirt, white_socks, closed_mouth, kneehighs, bangs, crescent_hat_ornament, mary_janes, simple_background, bow_(music), white_background, looking_at_viewer, standing, frilled_hat, holding_instrument, playing_instrument | | 2 | 7 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, black_headwear, black_skirt, black_vest, bow_(music), long_sleeves, looking_at_viewer, playing_instrument, solo, violin, white_shirt, bangs, closed_mouth, frills, holding_instrument, smile, blush, crescent_hat_ornament, hair_between_eyes, simple_background | | 3 | 6 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, bow_(music), crescent, long_sleeves, solo, violin, playing_instrument, skirt_set, smile, shirt | | 4 | 14 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1girl, solo, violin, bow_(music), skirt, crescent | | 5 | 5 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | 1girl, crescent, solo, skirt_set, looking_at_viewer | | 6 | 6 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | 1girl, skirt_set, solo, blush, long_sleeves, looking_at_viewer, vest, crescent, open_mouth, shirt, on_back | | 7 | 8 | ![](samples/7/clu7-sample0.png) | ![](samples/7/clu7-sample1.png) | ![](samples/7/clu7-sample2.png) | ![](samples/7/clu7-sample3.png) | ![](samples/7/clu7-sample4.png) | 1girl, blush, nipples, solo, after_sex, censored, cum_in_pussy, medium_breasts, cumdrip, open_mouth, anus, navel, nude, tears, black_thighhighs, cum_in_ass, gaping, open_clothes, shirt, small_breasts, spread_legs | | 8 | 5 | ![](samples/8/clu8-sample0.png) | ![](samples/8/clu8-sample1.png) | ![](samples/8/clu8-sample2.png) | 
![](samples/8/clu8-sample3.png) | ![](samples/8/clu8-sample4.png) | 1girl, blush, open_shirt, solo, nipples, medium_breasts, no_bra, pussy, bottomless, lying, no_panties, small_breasts | | 9 | 20 | ![](samples/9/clu9-sample0.png) | ![](samples/9/clu9-sample1.png) | ![](samples/9/clu9-sample2.png) | ![](samples/9/clu9-sample3.png) | ![](samples/9/clu9-sample4.png) | 1boy, 1girl, hetero, solo_focus, blush, penis, nipples, sex, cum_in_pussy, vaginal, censored, open_mouth, medium_breasts, thighhighs, navel, open_shirt, small_breasts, tears | | 10 | 7 | ![](samples/10/clu10-sample0.png) | ![](samples/10/clu10-sample1.png) | ![](samples/10/clu10-sample2.png) | ![](samples/10/clu10-sample3.png) | ![](samples/10/clu10-sample4.png) | 1boy, 1girl, blush, hetero, penis, solo_focus, fellatio, male_pubic_hair, nude, cum_in_mouth, medium_breasts, mosaic_censoring | | 11 | 6 | ![](samples/11/clu11-sample0.png) | ![](samples/11/clu11-sample1.png) | ![](samples/11/clu11-sample2.png) | ![](samples/11/clu11-sample3.png) | ![](samples/11/clu11-sample4.png) | 1girl, blush, bottomless, censored, solo, spread_anus, gaping, from_behind, looking_back, pussy_juice, spread_ass | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | bangs | black_headwear | black_skirt | black_vest | long_sleeves | looking_at_viewer | simple_background | solo | violin | white_background | white_shirt | closed_mouth | bow_(music) | crescent | holding_instrument | standing | hat_ornament | frilled_hat | full_body | black_footwear | white_socks | kneehighs | crescent_hat_ornament | mary_janes | playing_instrument | frills | smile | blush | hair_between_eyes | skirt_set | shirt | skirt | vest | open_mouth | on_back | nipples | after_sex | censored | cum_in_pussy | medium_breasts | cumdrip | anus | navel | nude | tears | black_thighhighs | cum_in_ass | gaping | open_clothes | small_breasts | spread_legs | open_shirt | no_bra | pussy | bottomless | lying | no_panties | 1boy | hetero | solo_focus | penis | sex | vaginal | thighhighs | fellatio | male_pubic_hair | cum_in_mouth | mosaic_censoring | spread_anus | from_behind | looking_back | pussy_juice | spread_ass | |----:|----------:|:----------------------------------|:----------------------------------|:----------------------------------|:----------------------------------|:----------------------------------|:--------|:--------|:-----------------|:--------------|:-------------|:---------------|:--------------------|:--------------------|:-------|:---------|:-------------------|:--------------|:---------------|:--------------|:-----------|:---------------------|:-----------|:---------------|:--------------|:------------|:-----------------|:--------------|:------------|:------------------------|:-------------|:---------------------|:---------|:--------|:--------|:--------------------|:------------|:--------|:--------|:-------|:-------------|:----------|:----------|:------------|:-----------|:---------------|:-----------------|:----------|:-------|:--------|:-------|:--------|:-------------------|:-------------|:---------|:---------------|:----------------|:--------------|:-------------|:---------|:--------|:-------------|:--------|:-------------|:-------|:---------|:-------------|:--------|:------|:----------|:-------------|:-----------|:------------------|:---------------|:-------------------|:--------------|:--------------|:---------------|:--------------|:-------------| | 0 | 8 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | 
![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 1 | 16 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | X | X | | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 2 | 7 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | X | X | X | X | X | X | X | X | X | | X | X | X | | X | | | | | | | | X | | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 3 | 6 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | | | | | X | | | X | X | | | | X | X | | | | | | | | | | | X | | X | | | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 4 | 14 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | | | | | | | | X | X | | | | X | X | | | | | | | | | | | | | | | | | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 5 | 5 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | X | | | | | | X | | X | | | | | | X | | | | | | | | | | | | | | | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 6 | 6 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | X | | | | | X | X | | X | | | | | | X | | | | | | | | | | | | | | X | | X | X | | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 7 | 8 | ![](samples/7/clu7-sample0.png) | ![](samples/7/clu7-sample1.png) | ![](samples/7/clu7-sample2.png) | ![](samples/7/clu7-sample3.png) | ![](samples/7/clu7-sample4.png) | X | | | | | | | | X | | | | | | | | | | | | | | | | | | | | X | | | X | | | X | | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | 8 | 5 | ![](samples/8/clu8-sample0.png) | ![](samples/8/clu8-sample1.png) | ![](samples/8/clu8-sample2.png) | ![](samples/8/clu8-sample3.png) | ![](samples/8/clu8-sample4.png) | X | | | | | | | | X | | | | | | | | | | | | | | | | | | | | X | | | | | | | | X | | | | X | | | | | | | | | | X | | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | 9 | 20 | ![](samples/9/clu9-sample0.png) | ![](samples/9/clu9-sample1.png) | ![](samples/9/clu9-sample2.png) | ![](samples/9/clu9-sample3.png) | ![](samples/9/clu9-sample4.png) | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | X | | | | | | X | | X | | X | X | X | | | X | | X | | | | | X | | X | | | | | | X | X | X | X | X | X | X | | | | | | | | | | | 10 | 7 | ![](samples/10/clu10-sample0.png) | 
![](samples/10/clu10-sample1.png) | ![](samples/10/clu10-sample2.png) | ![](samples/10/clu10-sample3.png) | ![](samples/10/clu10-sample4.png) | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | X | | | | | | | | | | | | X | | | | X | | | | | | | | | | | | | | X | X | X | X | | | | X | X | X | X | | | | | | | 11 | 6 | ![](samples/11/clu11-sample0.png) | ![](samples/11/clu11-sample1.png) | ![](samples/11/clu11-sample2.png) | ![](samples/11/clu11-sample3.png) | ![](samples/11/clu11-sample4.png) | X | | | | | | | | X | | | | | | | | | | | | | | | | | | | | X | | | | | | | | | | X | | | | | | | | | | X | | | | | | | X | | | | | | | | | | | | | | X | X | X | X | X |
CyberHarem/lunasa_prismriver_touhou
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-18T15:54:36+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-15T00:17:11+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of lunasa\_prismriver/ルナサ・プリズムリバー/루나사프리즘리버 (Touhou) =========================================================== This is the dataset of lunasa\_prismriver/ルナサ・プリズムリバー/루나사프리즘리버 (Touhou), containing 500 images and their tags. The core tags of this character are 'blonde\_hair, short\_hair, hat, yellow\_eyes', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide the raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code. List of Clusters ---------------- List of tag clustering results; some outfits may be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ 44, 61, 5, 4 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.### Raw Text Version### Table Version" ]
163c6d9151c37d203cda0f93a7b99e303cd3ea46
# Dataset of okazaki_yumemi/岡崎夢美 (Touhou)

This is the dataset of okazaki_yumemi/岡崎夢美 (Touhou), containing 285 images and their tags.

The core tags of this character are `red_hair, red_eyes, bow, long_hair, braid`, which are pruned in this dataset.

Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)).

## List of Packages

| Name             | Images | Size       | Download                                                                                                                | Type       | Description                                                           |
|:-----------------|-------:|:-----------|:--------------------------------------------------------------------------------------------------------------------------|:-----------|:----------------------------------------------------------------------|
| raw              | 285    | 238.14 MiB | [Download](https://huggingface.co/datasets/CyberHarem/okazaki_yumemi_touhou/resolve/main/dataset-raw.zip)               | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger).  |
| 800              | 285    | 163.81 MiB | [Download](https://huggingface.co/datasets/CyberHarem/okazaki_yumemi_touhou/resolve/main/dataset-800.zip)               | IMG+TXT    | dataset with the shorter side not exceeding 800 pixels.               |
| stage3-p480-800  | 519    | 291.92 MiB | [Download](https://huggingface.co/datasets/CyberHarem/okazaki_yumemi_touhou/resolve/main/dataset-stage3-p480-800.zip)   | IMG+TXT    | 3-stage cropped dataset with the area not less than 480x480 pixels.   |
| 1200             | 285    | 221.13 MiB | [Download](https://huggingface.co/datasets/CyberHarem/okazaki_yumemi_touhou/resolve/main/dataset-1200.zip)              | IMG+TXT    | dataset with the shorter side not exceeding 1200 pixels.              |
| stage3-p480-1200 | 519    | 367.98 MiB | [Download](https://huggingface.co/datasets/CyberHarem/okazaki_yumemi_touhou/resolve/main/dataset-stage3-p480-1200.zip)  | IMG+TXT    | 3-stage cropped dataset with the area not less than 480x480 pixels.   |

### Load Raw Dataset with Waifuc

We provide the raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code:

```python
import os
import zipfile

from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource

# download raw archive file
zip_file = hf_hub_download(
    repo_id='CyberHarem/okazaki_yumemi_touhou',
    repo_type='dataset',
    filename='dataset-raw.zip',
)

# extract files to your directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)

# load the dataset with waifuc
source = LocalSource(dataset_dir)
for item in source:
    print(item.image, item.meta['filename'], item.meta['tags'])
```

## List of Clusters

List of tag clustering results; some outfits may be mined here.
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 5 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, cross, red_capelet, single_braid, solo, hair_bow, dress, ribbon, skirt_set, smile | | 1 | 11 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, cross, smile, solo, capelet, skirt, short_hair, hexagram | | 2 | 5 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, capelet, short_hair, smile, solo, cross, open_mouth | | 3 | 6 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, bangs, long_sleeves, looking_at_viewer, red_bowtie, red_capelet, red_skirt, red_vest, smile, solo, white_shirt, closed_mouth, buttons, blush, collared_shirt, short_hair, simple_background | | 4 | 7 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1girl, bangs, buttons, collared_shirt, cross, long_sleeves, red_bowtie, red_capelet, red_vest, solo, white_shirt, open_mouth, red_skirt, very_long_hair, looking_at_viewer, frilled_skirt | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | cross | red_capelet | single_braid | solo | hair_bow | dress | ribbon | skirt_set | smile | capelet | skirt | short_hair | hexagram | open_mouth | bangs | long_sleeves | looking_at_viewer | red_bowtie | red_skirt | red_vest | white_shirt | closed_mouth | buttons | blush | collared_shirt | simple_background | very_long_hair | frilled_skirt | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:--------|:--------------|:---------------|:-------|:-----------|:--------|:---------|:------------|:--------|:----------|:--------|:-------------|:-----------|:-------------|:--------|:---------------|:--------------------|:-------------|:------------|:-----------|:--------------|:---------------|:----------|:--------|:-----------------|:--------------------|:-----------------|:----------------| | 0 | 5 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | 1 | 11 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | | | X | | | | | X | X | X | X | X | | | | | | | | | | | | | | | | | 2 | 5 | ![](samples/2/clu2-sample0.png) 
| ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | X | | | X | | | | | X | X | | X | | X | | | | | | | | | | | | | | | | 3 | 6 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | | X | | X | | | | | X | | | X | | | X | X | X | X | X | X | X | X | X | X | X | X | | | | 4 | 7 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | X | X | | X | | | | | | | | | | X | X | X | X | X | X | X | X | | X | | X | | X | X |
CyberHarem/okazaki_yumemi_touhou
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-18T16:12:03+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-15T00:28:27+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of okazaki\_yumemi/岡崎夢美 (Touhou) ======================================== This is the dataset of okazaki\_yumemi/岡崎夢美 (Touhou), containing 285 images and their tags. The core tags of this character are 'red\_hair, red\_eyes, bow, long\_hair, braid', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide the raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code. List of Clusters ---------------- List of tag clustering results; some outfits may be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ 44, 61, 5, 4 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.### Raw Text Version### Table Version" ]
89715528098458620c8a8b007ca80d66186673bf
# Myket Android Application Install Dataset

This dataset contains information on application install interactions of users in the [Myket](https://myket.ir/) android application market. The dataset was created for the purpose of evaluating interaction prediction models, requiring user and item identifiers along with timestamps of the interactions.

## Data Creation

The dataset was initially generated by the Myket data team, and later cleaned and subsampled by Erfan Loghmani, a master's student at Sharif University of Technology at the time. The data team focused on a two-week period and randomly sampled 1/3 of the users with interactions during that period. They then selected install and update interactions for three months before and after the two-week period, resulting in interactions spanning about 6 months and two weeks.

We further subsampled and cleaned the data to focus on application download interactions. We identified the top 8000 most installed applications and selected interactions related to them. We retained users with more than 32 interactions, resulting in 280,391 users. From this group, we randomly selected 10,000 users, and the data was filtered to include only interactions for these users. The detailed procedure can be found [here](https://github.com/erfanloghmani/myket-android-application-market-dataset/blob/main/create_data.ipynb).

## Data Structure

The dataset has two main files.

- `myket.csv`: This file contains the interaction information and follows the same format as the datasets used in the "[JODIE: Predicting Dynamic Embedding Trajectory in Temporal Interaction Networks](https://github.com/claws-lab/jodie)" (ACM SIGKDD 2019) project. However, this data does not contain state labels and interaction features, resulting in the associated columns being all zero.
- `app_info_sample.csv`: This file comprises features associated with applications present in the sample. For each individual application, information such as the approximate number of installs, average rating, count of ratings, and category are included. These features provide insights into the applications present in the dataset.

## Dataset Details

- Total Instances: 694,121 install interaction instances
- Instances Format: Triplets of user_id, app_name, timestamp
- 10,000 users and 7,988 android applications

For a detailed summary of the data's statistics, including information on users, applications, and interactions, please refer to the Python notebook available at [summary-stats.ipynb](https://github.com/erfanloghmani/myket-android-application-market-dataset/blob/main/summary-stats.ipynb). The notebook provides an overview of the dataset's characteristics and can be helpful for understanding the data's structure before using it for research or analysis.
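Since `myket.csv` stores one interaction per row in the JODIE-style layout described above, it can be inspected directly with pandas. The snippet below is a minimal sketch rather than an official loader; it assumes only the repository id and filename listed in this card, and the printed row count should match the instance count above.

```python
import pandas as pd
from huggingface_hub import hf_hub_download

# Fetch the interaction file from this dataset repository.
csv_path = hf_hub_download(
    repo_id="erfanloghmani/myket-android-application-recommendation-dataset",
    repo_type="dataset",
    filename="myket.csv",
)

# Each row is one install interaction (user, application, timestamp),
# plus the all-zero state-label/feature columns kept for JODIE compatibility.
interactions = pd.read_csv(csv_path)
print(interactions.shape)   # expected: 694,121 interaction rows
print(interactions.head())
```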
### Top 20 Most Installed Applications

| Package Name                       | Count of Interactions |
| ---------------------------------- | --------------------- |
| com.instagram.android              | 15292                 |
| ir.resaneh1.iptv                   | 12143                 |
| com.tencent.ig                     | 7919                  |
| com.ForgeGames.SpecialForcesGroup2 | 7797                  |
| ir.nomogame.ClutchGame             | 6193                  |
| com.dts.freefireth                 | 6041                  |
| com.whatsapp                       | 5876                  |
| com.supercell.clashofclans         | 5817                  |
| com.mojang.minecraftpe             | 5649                  |
| com.lenovo.anyshare.gps            | 5076                  |
| ir.medu.shad                       | 4673                  |
| com.firsttouchgames.dls3           | 4641                  |
| com.activision.callofduty.shooter  | 4357                  |
| com.tencent.iglite                 | 4126                  |
| com.aparat                         | 3598                  |
| com.kiloo.subwaysurf               | 3135                  |
| com.supercell.clashroyale          | 2793                  |
| co.palang.QuizOfKings              | 2589                  |
| com.nazdika.app                    | 2436                  |
| com.digikala                       | 2413                  |

## Comparison with SNAP Datasets

The Myket dataset introduced in this repository exhibits distinct characteristics compared to the real-world datasets used by the JODIE project. The table below provides a comparative overview of the key dataset characteristics:

| Dataset   | #Users     | #Items    | #Interactions | Average Interactions per User | Average Unique Items per User |
| --------- | ---------- | --------- | ------------- | ----------------------------- | ----------------------------- |
| **Myket** | **10,000** | **7,988** | 694,121       | 69.4                          | 54.6                          |
| LastFM    | 980        | 1,000     | 1,293,103     | 1,319.5                       | 158.2                         |
| Reddit    | **10,000** | 984       | 672,447       | 67.2                          | 7.9                           |
| Wikipedia | 8,227      | 1,000     | 157,474       | 19.1                          | 2.2                           |
| MOOC      | 7,047      | 97        | 411,749       | 58.4                          | 25.3                          |

The Myket dataset stands out by having an ample number of both users and items, highlighting its relevance for real-world, large-scale applications. Unlike the LastFM, Reddit, and Wikipedia datasets, where users exhibit repetitive item interactions, the Myket dataset contains a comparatively lower amount of repetitive interactions. This unique characteristic reflects the diverse nature of user behaviors in the Android application market environment.

## Citation

If you use this dataset in your research, please cite the following [preprint](https://arxiv.org/abs/2308.06862):

```
@misc{loghmani2023effect,
      title={Effect of Choosing Loss Function when Using T-batching for Representation Learning on Dynamic Networks},
      author={Erfan Loghmani and MohammadAmin Fazli},
      year={2023},
      eprint={2308.06862},
      archivePrefix={arXiv},
      primaryClass={cs.LG}
}
```
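As a closing sanity check, the per-user statistics quoted in the comparison table above (69.4 average interactions and 54.6 average unique items per user for Myket) can be recomputed from the raw triplets. The sketch below is illustrative only; the `user_id` and `item_id` column names are assumptions based on the JODIE format and may differ from the actual CSV header.

```python
import pandas as pd
from huggingface_hub import hf_hub_download

# Download and read the interaction triplets, as in the loading sketch above.
csv_path = hf_hub_download(
    repo_id="erfanloghmani/myket-android-application-recommendation-dataset",
    repo_type="dataset",
    filename="myket.csv",
)
interactions = pd.read_csv(csv_path)

# Column names are assumed from the JODIE format; adjust to the real header.
per_user = interactions.groupby("user_id")
print(per_user.ngroups)                      # expected: 10,000 users
print(per_user.size().mean())                # expected: ~69.4 interactions/user
print(per_user["item_id"].nunique().mean())  # expected: ~54.6 unique items/user
```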
erfanloghmani/myket-android-application-recommendation-dataset
[ "task_categories:graph-ml", "size_categories:100K<n<1M", "license:mit", "arxiv:2308.06862", "region:us" ]
2023-08-18T16:13:04+00:00
{"license": "mit", "size_categories": ["100K<n<1M"], "task_categories": ["graph-ml"], "configs": [{"config_name": "main_data", "data_files": "myket.csv"}, {"config_name": "package_name_features", "data_files": "app_info.csv"}]}
2023-08-18T21:00:40+00:00
[ "2308.06862" ]
[]
TAGS #task_categories-graph-ml #size_categories-100K<n<1M #license-mit #arxiv-2308.06862 #region-us
Myket Android Application Install Dataset ========================================= This dataset contains information on application install interactions of users in the Myket android application market. The dataset was created for the purpose of evaluating interaction prediction models, requiring user and item identifiers along with timestamps of the interactions. Data Creation ------------- The dataset was initially generated by the Myket data team, and later cleaned and subsampled by Erfan Loghmani, a master's student at Sharif University of Technology at the time. The data team focused on a two-week period and randomly sampled 1/3 of the users with interactions during that period. They then selected install and update interactions for three months before and after the two-week period, resulting in interactions spanning about 6 months and two weeks. We further subsampled and cleaned the data to focus on application download interactions. We identified the top 8000 most installed applications and selected interactions related to them. We retained users with more than 32 interactions, resulting in 280,391 users. From this group, we randomly selected 10,000 users, and the data was filtered to include only interactions for these users. The detailed procedure can be found here. Data Structure -------------- The dataset has two main files. * 'URL': This file contains the interaction information and follows the same format as the datasets used in the "JODIE: Predicting Dynamic Embedding Trajectory in Temporal Interaction Networks" (ACM SIGKDD 2019) project. However, this data does not contain state labels and interaction features, resulting in the associated columns being all zero. * 'app\_info\_sample.csv': This file comprises features associated with applications present in the sample. For each individual application, information such as the approximate number of installs, average rating, count of ratings, and category are included. These features provide insights into the applications present in the dataset. Dataset Details --------------- * Total Instances: 694,121 install interaction instances * Instances Format: Triplets of user\_id, app\_name, timestamp * 10,000 users and 7,988 android applications For a detailed summary of the data's statistics, including information on users, applications, and interactions, please refer to the Python notebook available at URL. The notebook provides an overview of the dataset's characteristics and can be helpful for understanding the data's structure before using it for research or analysis. ### Top 20 Most Installed Applications Comparison with SNAP Datasets ----------------------------- The Myket dataset introduced in this repository exhibits distinct characteristics compared to the real-world datasets used by the JODIE project. The table below provides a comparative overview of the key dataset characteristics: The Myket dataset stands out by having an ample number of both users and items, highlighting its relevance for real-world, large-scale applications. Unlike the LastFM, Reddit, and Wikipedia datasets, where users exhibit repetitive item interactions, the Myket dataset contains a comparatively lower amount of repetitive interactions. This unique characteristic reflects the diverse nature of user behaviors in the Android application market environment. If you use this dataset in your research, please cite the following preprint:
[ "### Top 20 Most Installed Applications\n\n\n\nComparison with SNAP Datasets\n-----------------------------\n\n\nThe Myket dataset introduced in this repository exhibits distinct characteristics compared to the real-world datasets used by the project. The table below provides a comparative overview of the key dataset characteristics:\n\n\n\nThe Myket dataset stands out by having an ample number of both users and items, highlighting its relevance for real-world, large-scale applications. Unlike LastFM, Reddit, and Wikipedia datasets, where users exhibit repetitive item interactions, the Myket dataset contains a comparatively lower amount of repetitive interactions. This unique characteristic reflects the diverse nature of user behaviors in the Android application market environment.\n\n\nIf you use this dataset in your research, please cite the following preprint:" ]
[ "TAGS\n#task_categories-graph-ml #size_categories-100K<n<1M #license-mit #arxiv-2308.06862 #region-us \n", "### Top 20 Most Installed Applications\n\n\n\nComparison with SNAP Datasets\n-----------------------------\n\n\nThe Myket dataset introduced in this repository exhibits distinct characteristics compared to the real-world datasets used by the project. The table below provides a comparative overview of the key dataset characteristics:\n\n\n\nThe Myket dataset stands out by having an ample number of both users and items, highlighting its relevance for real-world, large-scale applications. Unlike LastFM, Reddit, and Wikipedia datasets, where users exhibit repetitive item interactions, the Myket dataset contains a comparatively lower amount of repetitive interactions. This unique characteristic reflects the diverse nature of user behaviors in the Android application market environment.\n\n\nIf you use this dataset in your research, please cite the following preprint:" ]
[ 42, 183 ]
[ "passage: TAGS\n#task_categories-graph-ml #size_categories-100K<n<1M #license-mit #arxiv-2308.06862 #region-us \n### Top 20 Most Installed Applications\n\n\n\nComparison with SNAP Datasets\n-----------------------------\n\n\nThe Myket dataset introduced in this repository exhibits distinct characteristics compared to the real-world datasets used by the project. The table below provides a comparative overview of the key dataset characteristics:\n\n\n\nThe Myket dataset stands out by having an ample number of both users and items, highlighting its relevance for real-world, large-scale applications. Unlike LastFM, Reddit, and Wikipedia datasets, where users exhibit repetitive item interactions, the Myket dataset contains a comparatively lower amount of repetitive interactions. This unique characteristic reflects the diverse nature of user behaviors in the Android application market environment.\n\n\nIf you use this dataset in your research, please cite the following preprint:" ]
dffd85ec49dfd5987f0b9a922de3337e8b6f31d3
# Dataset Card for "Spirit_BERT_Baseline" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
EgilKarlsen/Spirit_BERT_Baseline
[ "region:us" ]
2023-08-18T16:16:01+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "0", "dtype": "float32"}, {"name": "1", "dtype": "float32"}, {"name": "2", "dtype": "float32"}, {"name": "3", "dtype": "float32"}, {"name": "4", "dtype": "float32"}, {"name": "5", "dtype": "float32"}, {"name": "6", "dtype": "float32"}, {"name": "7", "dtype": "float32"}, {"name": "8", "dtype": "float32"}, {"name": "9", "dtype": "float32"}, {"name": "10", "dtype": "float32"}, {"name": "11", "dtype": "float32"}, {"name": "12", "dtype": "float32"}, {"name": "13", "dtype": "float32"}, {"name": "14", "dtype": "float32"}, {"name": "15", "dtype": "float32"}, {"name": "16", "dtype": "float32"}, {"name": "17", "dtype": "float32"}, {"name": "18", "dtype": "float32"}, {"name": "19", "dtype": "float32"}, {"name": "20", "dtype": "float32"}, {"name": "21", "dtype": "float32"}, {"name": "22", "dtype": "float32"}, {"name": "23", "dtype": "float32"}, {"name": "24", "dtype": "float32"}, {"name": "25", "dtype": "float32"}, {"name": "26", "dtype": "float32"}, {"name": "27", "dtype": "float32"}, {"name": "28", "dtype": "float32"}, {"name": "29", "dtype": "float32"}, {"name": "30", "dtype": "float32"}, {"name": "31", "dtype": "float32"}, {"name": "32", "dtype": "float32"}, {"name": "33", "dtype": "float32"}, {"name": "34", "dtype": "float32"}, {"name": "35", "dtype": "float32"}, {"name": "36", "dtype": "float32"}, {"name": "37", "dtype": "float32"}, {"name": "38", "dtype": "float32"}, {"name": "39", "dtype": "float32"}, {"name": "40", "dtype": "float32"}, {"name": "41", "dtype": "float32"}, {"name": "42", "dtype": "float32"}, {"name": "43", "dtype": "float32"}, {"name": "44", "dtype": "float32"}, {"name": "45", "dtype": "float32"}, {"name": "46", "dtype": "float32"}, {"name": "47", "dtype": "float32"}, {"name": "48", "dtype": "float32"}, {"name": "49", "dtype": "float32"}, {"name": "50", "dtype": "float32"}, {"name": "51", "dtype": "float32"}, {"name": "52", "dtype": "float32"}, {"name": "53", "dtype": "float32"}, {"name": "54", "dtype": "float32"}, {"name": "55", "dtype": "float32"}, {"name": "56", "dtype": "float32"}, {"name": "57", "dtype": "float32"}, {"name": "58", "dtype": "float32"}, {"name": "59", "dtype": "float32"}, {"name": "60", "dtype": "float32"}, {"name": "61", "dtype": "float32"}, {"name": "62", "dtype": "float32"}, {"name": "63", "dtype": "float32"}, {"name": "64", "dtype": "float32"}, {"name": "65", "dtype": "float32"}, {"name": "66", "dtype": "float32"}, {"name": "67", "dtype": "float32"}, {"name": "68", "dtype": "float32"}, {"name": "69", "dtype": "float32"}, {"name": "70", "dtype": "float32"}, {"name": "71", "dtype": "float32"}, {"name": "72", "dtype": "float32"}, {"name": "73", "dtype": "float32"}, {"name": "74", "dtype": "float32"}, {"name": "75", "dtype": "float32"}, {"name": "76", "dtype": "float32"}, {"name": "77", "dtype": "float32"}, {"name": "78", "dtype": "float32"}, {"name": "79", "dtype": "float32"}, {"name": "80", "dtype": "float32"}, {"name": "81", "dtype": "float32"}, {"name": "82", "dtype": "float32"}, {"name": "83", "dtype": "float32"}, {"name": "84", "dtype": "float32"}, {"name": "85", "dtype": "float32"}, {"name": "86", "dtype": "float32"}, {"name": "87", "dtype": "float32"}, {"name": "88", "dtype": "float32"}, {"name": "89", "dtype": "float32"}, {"name": "90", "dtype": "float32"}, {"name": "91", "dtype": "float32"}, {"name": "92", "dtype": "float32"}, {"name": "93", "dtype": "float32"}, 
{"name": "94", "dtype": "float32"}, {"name": "95", "dtype": "float32"}, {"name": "96", "dtype": "float32"}, {"name": "97", "dtype": "float32"}, {"name": "98", "dtype": "float32"}, {"name": "99", "dtype": "float32"}, {"name": "100", "dtype": "float32"}, {"name": "101", "dtype": "float32"}, {"name": "102", "dtype": "float32"}, {"name": "103", "dtype": "float32"}, {"name": "104", "dtype": "float32"}, {"name": "105", "dtype": "float32"}, {"name": "106", "dtype": "float32"}, {"name": "107", "dtype": "float32"}, {"name": "108", "dtype": "float32"}, {"name": "109", "dtype": "float32"}, {"name": "110", "dtype": "float32"}, {"name": "111", "dtype": "float32"}, {"name": "112", "dtype": "float32"}, {"name": "113", "dtype": "float32"}, {"name": "114", "dtype": "float32"}, {"name": "115", "dtype": "float32"}, {"name": "116", "dtype": "float32"}, {"name": "117", "dtype": "float32"}, {"name": "118", "dtype": "float32"}, {"name": "119", "dtype": "float32"}, {"name": "120", "dtype": "float32"}, {"name": "121", "dtype": "float32"}, {"name": "122", "dtype": "float32"}, {"name": "123", "dtype": "float32"}, {"name": "124", "dtype": "float32"}, {"name": "125", "dtype": "float32"}, {"name": "126", "dtype": "float32"}, {"name": "127", "dtype": "float32"}, {"name": "128", "dtype": "float32"}, {"name": "129", "dtype": "float32"}, {"name": "130", "dtype": "float32"}, {"name": "131", "dtype": "float32"}, {"name": "132", "dtype": "float32"}, {"name": "133", "dtype": "float32"}, {"name": "134", "dtype": "float32"}, {"name": "135", "dtype": "float32"}, {"name": "136", "dtype": "float32"}, {"name": "137", "dtype": "float32"}, {"name": "138", "dtype": "float32"}, {"name": "139", "dtype": "float32"}, {"name": "140", "dtype": "float32"}, {"name": "141", "dtype": "float32"}, {"name": "142", "dtype": "float32"}, {"name": "143", "dtype": "float32"}, {"name": "144", "dtype": "float32"}, {"name": "145", "dtype": "float32"}, {"name": "146", "dtype": "float32"}, {"name": "147", "dtype": "float32"}, {"name": "148", "dtype": "float32"}, {"name": "149", "dtype": "float32"}, {"name": "150", "dtype": "float32"}, {"name": "151", "dtype": "float32"}, {"name": "152", "dtype": "float32"}, {"name": "153", "dtype": "float32"}, {"name": "154", "dtype": "float32"}, {"name": "155", "dtype": "float32"}, {"name": "156", "dtype": "float32"}, {"name": "157", "dtype": "float32"}, {"name": "158", "dtype": "float32"}, {"name": "159", "dtype": "float32"}, {"name": "160", "dtype": "float32"}, {"name": "161", "dtype": "float32"}, {"name": "162", "dtype": "float32"}, {"name": "163", "dtype": "float32"}, {"name": "164", "dtype": "float32"}, {"name": "165", "dtype": "float32"}, {"name": "166", "dtype": "float32"}, {"name": "167", "dtype": "float32"}, {"name": "168", "dtype": "float32"}, {"name": "169", "dtype": "float32"}, {"name": "170", "dtype": "float32"}, {"name": "171", "dtype": "float32"}, {"name": "172", "dtype": "float32"}, {"name": "173", "dtype": "float32"}, {"name": "174", "dtype": "float32"}, {"name": "175", "dtype": "float32"}, {"name": "176", "dtype": "float32"}, {"name": "177", "dtype": "float32"}, {"name": "178", "dtype": "float32"}, {"name": "179", "dtype": "float32"}, {"name": "180", "dtype": "float32"}, {"name": "181", "dtype": "float32"}, {"name": "182", "dtype": "float32"}, {"name": "183", "dtype": "float32"}, {"name": "184", "dtype": "float32"}, {"name": "185", "dtype": "float32"}, {"name": "186", "dtype": "float32"}, {"name": "187", "dtype": "float32"}, {"name": "188", "dtype": "float32"}, {"name": "189", "dtype": "float32"}, {"name": 
"190", "dtype": "float32"}, {"name": "191", "dtype": "float32"}, {"name": "192", "dtype": "float32"}, {"name": "193", "dtype": "float32"}, {"name": "194", "dtype": "float32"}, {"name": "195", "dtype": "float32"}, {"name": "196", "dtype": "float32"}, {"name": "197", "dtype": "float32"}, {"name": "198", "dtype": "float32"}, {"name": "199", "dtype": "float32"}, {"name": "200", "dtype": "float32"}, {"name": "201", "dtype": "float32"}, {"name": "202", "dtype": "float32"}, {"name": "203", "dtype": "float32"}, {"name": "204", "dtype": "float32"}, {"name": "205", "dtype": "float32"}, {"name": "206", "dtype": "float32"}, {"name": "207", "dtype": "float32"}, {"name": "208", "dtype": "float32"}, {"name": "209", "dtype": "float32"}, {"name": "210", "dtype": "float32"}, {"name": "211", "dtype": "float32"}, {"name": "212", "dtype": "float32"}, {"name": "213", "dtype": "float32"}, {"name": "214", "dtype": "float32"}, {"name": "215", "dtype": "float32"}, {"name": "216", "dtype": "float32"}, {"name": "217", "dtype": "float32"}, {"name": "218", "dtype": "float32"}, {"name": "219", "dtype": "float32"}, {"name": "220", "dtype": "float32"}, {"name": "221", "dtype": "float32"}, {"name": "222", "dtype": "float32"}, {"name": "223", "dtype": "float32"}, {"name": "224", "dtype": "float32"}, {"name": "225", "dtype": "float32"}, {"name": "226", "dtype": "float32"}, {"name": "227", "dtype": "float32"}, {"name": "228", "dtype": "float32"}, {"name": "229", "dtype": "float32"}, {"name": "230", "dtype": "float32"}, {"name": "231", "dtype": "float32"}, {"name": "232", "dtype": "float32"}, {"name": "233", "dtype": "float32"}, {"name": "234", "dtype": "float32"}, {"name": "235", "dtype": "float32"}, {"name": "236", "dtype": "float32"}, {"name": "237", "dtype": "float32"}, {"name": "238", "dtype": "float32"}, {"name": "239", "dtype": "float32"}, {"name": "240", "dtype": "float32"}, {"name": "241", "dtype": "float32"}, {"name": "242", "dtype": "float32"}, {"name": "243", "dtype": "float32"}, {"name": "244", "dtype": "float32"}, {"name": "245", "dtype": "float32"}, {"name": "246", "dtype": "float32"}, {"name": "247", "dtype": "float32"}, {"name": "248", "dtype": "float32"}, {"name": "249", "dtype": "float32"}, {"name": "250", "dtype": "float32"}, {"name": "251", "dtype": "float32"}, {"name": "252", "dtype": "float32"}, {"name": "253", "dtype": "float32"}, {"name": "254", "dtype": "float32"}, {"name": "255", "dtype": "float32"}, {"name": "256", "dtype": "float32"}, {"name": "257", "dtype": "float32"}, {"name": "258", "dtype": "float32"}, {"name": "259", "dtype": "float32"}, {"name": "260", "dtype": "float32"}, {"name": "261", "dtype": "float32"}, {"name": "262", "dtype": "float32"}, {"name": "263", "dtype": "float32"}, {"name": "264", "dtype": "float32"}, {"name": "265", "dtype": "float32"}, {"name": "266", "dtype": "float32"}, {"name": "267", "dtype": "float32"}, {"name": "268", "dtype": "float32"}, {"name": "269", "dtype": "float32"}, {"name": "270", "dtype": "float32"}, {"name": "271", "dtype": "float32"}, {"name": "272", "dtype": "float32"}, {"name": "273", "dtype": "float32"}, {"name": "274", "dtype": "float32"}, {"name": "275", "dtype": "float32"}, {"name": "276", "dtype": "float32"}, {"name": "277", "dtype": "float32"}, {"name": "278", "dtype": "float32"}, {"name": "279", "dtype": "float32"}, {"name": "280", "dtype": "float32"}, {"name": "281", "dtype": "float32"}, {"name": "282", "dtype": "float32"}, {"name": "283", "dtype": "float32"}, {"name": "284", "dtype": "float32"}, {"name": "285", "dtype": "float32"}, {"name": 
"286", "dtype": "float32"}, {"name": "287", "dtype": "float32"}, {"name": "288", "dtype": "float32"}, {"name": "289", "dtype": "float32"}, {"name": "290", "dtype": "float32"}, {"name": "291", "dtype": "float32"}, {"name": "292", "dtype": "float32"}, {"name": "293", "dtype": "float32"}, {"name": "294", "dtype": "float32"}, {"name": "295", "dtype": "float32"}, {"name": "296", "dtype": "float32"}, {"name": "297", "dtype": "float32"}, {"name": "298", "dtype": "float32"}, {"name": "299", "dtype": "float32"}, {"name": "300", "dtype": "float32"}, {"name": "301", "dtype": "float32"}, {"name": "302", "dtype": "float32"}, {"name": "303", "dtype": "float32"}, {"name": "304", "dtype": "float32"}, {"name": "305", "dtype": "float32"}, {"name": "306", "dtype": "float32"}, {"name": "307", "dtype": "float32"}, {"name": "308", "dtype": "float32"}, {"name": "309", "dtype": "float32"}, {"name": "310", "dtype": "float32"}, {"name": "311", "dtype": "float32"}, {"name": "312", "dtype": "float32"}, {"name": "313", "dtype": "float32"}, {"name": "314", "dtype": "float32"}, {"name": "315", "dtype": "float32"}, {"name": "316", "dtype": "float32"}, {"name": "317", "dtype": "float32"}, {"name": "318", "dtype": "float32"}, {"name": "319", "dtype": "float32"}, {"name": "320", "dtype": "float32"}, {"name": "321", "dtype": "float32"}, {"name": "322", "dtype": "float32"}, {"name": "323", "dtype": "float32"}, {"name": "324", "dtype": "float32"}, {"name": "325", "dtype": "float32"}, {"name": "326", "dtype": "float32"}, {"name": "327", "dtype": "float32"}, {"name": "328", "dtype": "float32"}, {"name": "329", "dtype": "float32"}, {"name": "330", "dtype": "float32"}, {"name": "331", "dtype": "float32"}, {"name": "332", "dtype": "float32"}, {"name": "333", "dtype": "float32"}, {"name": "334", "dtype": "float32"}, {"name": "335", "dtype": "float32"}, {"name": "336", "dtype": "float32"}, {"name": "337", "dtype": "float32"}, {"name": "338", "dtype": "float32"}, {"name": "339", "dtype": "float32"}, {"name": "340", "dtype": "float32"}, {"name": "341", "dtype": "float32"}, {"name": "342", "dtype": "float32"}, {"name": "343", "dtype": "float32"}, {"name": "344", "dtype": "float32"}, {"name": "345", "dtype": "float32"}, {"name": "346", "dtype": "float32"}, {"name": "347", "dtype": "float32"}, {"name": "348", "dtype": "float32"}, {"name": "349", "dtype": "float32"}, {"name": "350", "dtype": "float32"}, {"name": "351", "dtype": "float32"}, {"name": "352", "dtype": "float32"}, {"name": "353", "dtype": "float32"}, {"name": "354", "dtype": "float32"}, {"name": "355", "dtype": "float32"}, {"name": "356", "dtype": "float32"}, {"name": "357", "dtype": "float32"}, {"name": "358", "dtype": "float32"}, {"name": "359", "dtype": "float32"}, {"name": "360", "dtype": "float32"}, {"name": "361", "dtype": "float32"}, {"name": "362", "dtype": "float32"}, {"name": "363", "dtype": "float32"}, {"name": "364", "dtype": "float32"}, {"name": "365", "dtype": "float32"}, {"name": "366", "dtype": "float32"}, {"name": "367", "dtype": "float32"}, {"name": "368", "dtype": "float32"}, {"name": "369", "dtype": "float32"}, {"name": "370", "dtype": "float32"}, {"name": "371", "dtype": "float32"}, {"name": "372", "dtype": "float32"}, {"name": "373", "dtype": "float32"}, {"name": "374", "dtype": "float32"}, {"name": "375", "dtype": "float32"}, {"name": "376", "dtype": "float32"}, {"name": "377", "dtype": "float32"}, {"name": "378", "dtype": "float32"}, {"name": "379", "dtype": "float32"}, {"name": "380", "dtype": "float32"}, {"name": "381", "dtype": "float32"}, {"name": 
"382", "dtype": "float32"}, {"name": "383", "dtype": "float32"}, {"name": "384", "dtype": "float32"}, {"name": "385", "dtype": "float32"}, {"name": "386", "dtype": "float32"}, {"name": "387", "dtype": "float32"}, {"name": "388", "dtype": "float32"}, {"name": "389", "dtype": "float32"}, {"name": "390", "dtype": "float32"}, {"name": "391", "dtype": "float32"}, {"name": "392", "dtype": "float32"}, {"name": "393", "dtype": "float32"}, {"name": "394", "dtype": "float32"}, {"name": "395", "dtype": "float32"}, {"name": "396", "dtype": "float32"}, {"name": "397", "dtype": "float32"}, {"name": "398", "dtype": "float32"}, {"name": "399", "dtype": "float32"}, {"name": "400", "dtype": "float32"}, {"name": "401", "dtype": "float32"}, {"name": "402", "dtype": "float32"}, {"name": "403", "dtype": "float32"}, {"name": "404", "dtype": "float32"}, {"name": "405", "dtype": "float32"}, {"name": "406", "dtype": "float32"}, {"name": "407", "dtype": "float32"}, {"name": "408", "dtype": "float32"}, {"name": "409", "dtype": "float32"}, {"name": "410", "dtype": "float32"}, {"name": "411", "dtype": "float32"}, {"name": "412", "dtype": "float32"}, {"name": "413", "dtype": "float32"}, {"name": "414", "dtype": "float32"}, {"name": "415", "dtype": "float32"}, {"name": "416", "dtype": "float32"}, {"name": "417", "dtype": "float32"}, {"name": "418", "dtype": "float32"}, {"name": "419", "dtype": "float32"}, {"name": "420", "dtype": "float32"}, {"name": "421", "dtype": "float32"}, {"name": "422", "dtype": "float32"}, {"name": "423", "dtype": "float32"}, {"name": "424", "dtype": "float32"}, {"name": "425", "dtype": "float32"}, {"name": "426", "dtype": "float32"}, {"name": "427", "dtype": "float32"}, {"name": "428", "dtype": "float32"}, {"name": "429", "dtype": "float32"}, {"name": "430", "dtype": "float32"}, {"name": "431", "dtype": "float32"}, {"name": "432", "dtype": "float32"}, {"name": "433", "dtype": "float32"}, {"name": "434", "dtype": "float32"}, {"name": "435", "dtype": "float32"}, {"name": "436", "dtype": "float32"}, {"name": "437", "dtype": "float32"}, {"name": "438", "dtype": "float32"}, {"name": "439", "dtype": "float32"}, {"name": "440", "dtype": "float32"}, {"name": "441", "dtype": "float32"}, {"name": "442", "dtype": "float32"}, {"name": "443", "dtype": "float32"}, {"name": "444", "dtype": "float32"}, {"name": "445", "dtype": "float32"}, {"name": "446", "dtype": "float32"}, {"name": "447", "dtype": "float32"}, {"name": "448", "dtype": "float32"}, {"name": "449", "dtype": "float32"}, {"name": "450", "dtype": "float32"}, {"name": "451", "dtype": "float32"}, {"name": "452", "dtype": "float32"}, {"name": "453", "dtype": "float32"}, {"name": "454", "dtype": "float32"}, {"name": "455", "dtype": "float32"}, {"name": "456", "dtype": "float32"}, {"name": "457", "dtype": "float32"}, {"name": "458", "dtype": "float32"}, {"name": "459", "dtype": "float32"}, {"name": "460", "dtype": "float32"}, {"name": "461", "dtype": "float32"}, {"name": "462", "dtype": "float32"}, {"name": "463", "dtype": "float32"}, {"name": "464", "dtype": "float32"}, {"name": "465", "dtype": "float32"}, {"name": "466", "dtype": "float32"}, {"name": "467", "dtype": "float32"}, {"name": "468", "dtype": "float32"}, {"name": "469", "dtype": "float32"}, {"name": "470", "dtype": "float32"}, {"name": "471", "dtype": "float32"}, {"name": "472", "dtype": "float32"}, {"name": "473", "dtype": "float32"}, {"name": "474", "dtype": "float32"}, {"name": "475", "dtype": "float32"}, {"name": "476", "dtype": "float32"}, {"name": "477", "dtype": "float32"}, {"name": 
"478", "dtype": "float32"}, {"name": "479", "dtype": "float32"}, {"name": "480", "dtype": "float32"}, {"name": "481", "dtype": "float32"}, {"name": "482", "dtype": "float32"}, {"name": "483", "dtype": "float32"}, {"name": "484", "dtype": "float32"}, {"name": "485", "dtype": "float32"}, {"name": "486", "dtype": "float32"}, {"name": "487", "dtype": "float32"}, {"name": "488", "dtype": "float32"}, {"name": "489", "dtype": "float32"}, {"name": "490", "dtype": "float32"}, {"name": "491", "dtype": "float32"}, {"name": "492", "dtype": "float32"}, {"name": "493", "dtype": "float32"}, {"name": "494", "dtype": "float32"}, {"name": "495", "dtype": "float32"}, {"name": "496", "dtype": "float32"}, {"name": "497", "dtype": "float32"}, {"name": "498", "dtype": "float32"}, {"name": "499", "dtype": "float32"}, {"name": "500", "dtype": "float32"}, {"name": "501", "dtype": "float32"}, {"name": "502", "dtype": "float32"}, {"name": "503", "dtype": "float32"}, {"name": "504", "dtype": "float32"}, {"name": "505", "dtype": "float32"}, {"name": "506", "dtype": "float32"}, {"name": "507", "dtype": "float32"}, {"name": "508", "dtype": "float32"}, {"name": "509", "dtype": "float32"}, {"name": "510", "dtype": "float32"}, {"name": "511", "dtype": "float32"}, {"name": "512", "dtype": "float32"}, {"name": "513", "dtype": "float32"}, {"name": "514", "dtype": "float32"}, {"name": "515", "dtype": "float32"}, {"name": "516", "dtype": "float32"}, {"name": "517", "dtype": "float32"}, {"name": "518", "dtype": "float32"}, {"name": "519", "dtype": "float32"}, {"name": "520", "dtype": "float32"}, {"name": "521", "dtype": "float32"}, {"name": "522", "dtype": "float32"}, {"name": "523", "dtype": "float32"}, {"name": "524", "dtype": "float32"}, {"name": "525", "dtype": "float32"}, {"name": "526", "dtype": "float32"}, {"name": "527", "dtype": "float32"}, {"name": "528", "dtype": "float32"}, {"name": "529", "dtype": "float32"}, {"name": "530", "dtype": "float32"}, {"name": "531", "dtype": "float32"}, {"name": "532", "dtype": "float32"}, {"name": "533", "dtype": "float32"}, {"name": "534", "dtype": "float32"}, {"name": "535", "dtype": "float32"}, {"name": "536", "dtype": "float32"}, {"name": "537", "dtype": "float32"}, {"name": "538", "dtype": "float32"}, {"name": "539", "dtype": "float32"}, {"name": "540", "dtype": "float32"}, {"name": "541", "dtype": "float32"}, {"name": "542", "dtype": "float32"}, {"name": "543", "dtype": "float32"}, {"name": "544", "dtype": "float32"}, {"name": "545", "dtype": "float32"}, {"name": "546", "dtype": "float32"}, {"name": "547", "dtype": "float32"}, {"name": "548", "dtype": "float32"}, {"name": "549", "dtype": "float32"}, {"name": "550", "dtype": "float32"}, {"name": "551", "dtype": "float32"}, {"name": "552", "dtype": "float32"}, {"name": "553", "dtype": "float32"}, {"name": "554", "dtype": "float32"}, {"name": "555", "dtype": "float32"}, {"name": "556", "dtype": "float32"}, {"name": "557", "dtype": "float32"}, {"name": "558", "dtype": "float32"}, {"name": "559", "dtype": "float32"}, {"name": "560", "dtype": "float32"}, {"name": "561", "dtype": "float32"}, {"name": "562", "dtype": "float32"}, {"name": "563", "dtype": "float32"}, {"name": "564", "dtype": "float32"}, {"name": "565", "dtype": "float32"}, {"name": "566", "dtype": "float32"}, {"name": "567", "dtype": "float32"}, {"name": "568", "dtype": "float32"}, {"name": "569", "dtype": "float32"}, {"name": "570", "dtype": "float32"}, {"name": "571", "dtype": "float32"}, {"name": "572", "dtype": "float32"}, {"name": "573", "dtype": "float32"}, {"name": 
"574", "dtype": "float32"}, {"name": "575", "dtype": "float32"}, {"name": "576", "dtype": "float32"}, {"name": "577", "dtype": "float32"}, {"name": "578", "dtype": "float32"}, {"name": "579", "dtype": "float32"}, {"name": "580", "dtype": "float32"}, {"name": "581", "dtype": "float32"}, {"name": "582", "dtype": "float32"}, {"name": "583", "dtype": "float32"}, {"name": "584", "dtype": "float32"}, {"name": "585", "dtype": "float32"}, {"name": "586", "dtype": "float32"}, {"name": "587", "dtype": "float32"}, {"name": "588", "dtype": "float32"}, {"name": "589", "dtype": "float32"}, {"name": "590", "dtype": "float32"}, {"name": "591", "dtype": "float32"}, {"name": "592", "dtype": "float32"}, {"name": "593", "dtype": "float32"}, {"name": "594", "dtype": "float32"}, {"name": "595", "dtype": "float32"}, {"name": "596", "dtype": "float32"}, {"name": "597", "dtype": "float32"}, {"name": "598", "dtype": "float32"}, {"name": "599", "dtype": "float32"}, {"name": "600", "dtype": "float32"}, {"name": "601", "dtype": "float32"}, {"name": "602", "dtype": "float32"}, {"name": "603", "dtype": "float32"}, {"name": "604", "dtype": "float32"}, {"name": "605", "dtype": "float32"}, {"name": "606", "dtype": "float32"}, {"name": "607", "dtype": "float32"}, {"name": "608", "dtype": "float32"}, {"name": "609", "dtype": "float32"}, {"name": "610", "dtype": "float32"}, {"name": "611", "dtype": "float32"}, {"name": "612", "dtype": "float32"}, {"name": "613", "dtype": "float32"}, {"name": "614", "dtype": "float32"}, {"name": "615", "dtype": "float32"}, {"name": "616", "dtype": "float32"}, {"name": "617", "dtype": "float32"}, {"name": "618", "dtype": "float32"}, {"name": "619", "dtype": "float32"}, {"name": "620", "dtype": "float32"}, {"name": "621", "dtype": "float32"}, {"name": "622", "dtype": "float32"}, {"name": "623", "dtype": "float32"}, {"name": "624", "dtype": "float32"}, {"name": "625", "dtype": "float32"}, {"name": "626", "dtype": "float32"}, {"name": "627", "dtype": "float32"}, {"name": "628", "dtype": "float32"}, {"name": "629", "dtype": "float32"}, {"name": "630", "dtype": "float32"}, {"name": "631", "dtype": "float32"}, {"name": "632", "dtype": "float32"}, {"name": "633", "dtype": "float32"}, {"name": "634", "dtype": "float32"}, {"name": "635", "dtype": "float32"}, {"name": "636", "dtype": "float32"}, {"name": "637", "dtype": "float32"}, {"name": "638", "dtype": "float32"}, {"name": "639", "dtype": "float32"}, {"name": "640", "dtype": "float32"}, {"name": "641", "dtype": "float32"}, {"name": "642", "dtype": "float32"}, {"name": "643", "dtype": "float32"}, {"name": "644", "dtype": "float32"}, {"name": "645", "dtype": "float32"}, {"name": "646", "dtype": "float32"}, {"name": "647", "dtype": "float32"}, {"name": "648", "dtype": "float32"}, {"name": "649", "dtype": "float32"}, {"name": "650", "dtype": "float32"}, {"name": "651", "dtype": "float32"}, {"name": "652", "dtype": "float32"}, {"name": "653", "dtype": "float32"}, {"name": "654", "dtype": "float32"}, {"name": "655", "dtype": "float32"}, {"name": "656", "dtype": "float32"}, {"name": "657", "dtype": "float32"}, {"name": "658", "dtype": "float32"}, {"name": "659", "dtype": "float32"}, {"name": "660", "dtype": "float32"}, {"name": "661", "dtype": "float32"}, {"name": "662", "dtype": "float32"}, {"name": "663", "dtype": "float32"}, {"name": "664", "dtype": "float32"}, {"name": "665", "dtype": "float32"}, {"name": "666", "dtype": "float32"}, {"name": "667", "dtype": "float32"}, {"name": "668", "dtype": "float32"}, {"name": "669", "dtype": "float32"}, {"name": 
"670", "dtype": "float32"}, {"name": "671", "dtype": "float32"}, {"name": "672", "dtype": "float32"}, {"name": "673", "dtype": "float32"}, {"name": "674", "dtype": "float32"}, {"name": "675", "dtype": "float32"}, {"name": "676", "dtype": "float32"}, {"name": "677", "dtype": "float32"}, {"name": "678", "dtype": "float32"}, {"name": "679", "dtype": "float32"}, {"name": "680", "dtype": "float32"}, {"name": "681", "dtype": "float32"}, {"name": "682", "dtype": "float32"}, {"name": "683", "dtype": "float32"}, {"name": "684", "dtype": "float32"}, {"name": "685", "dtype": "float32"}, {"name": "686", "dtype": "float32"}, {"name": "687", "dtype": "float32"}, {"name": "688", "dtype": "float32"}, {"name": "689", "dtype": "float32"}, {"name": "690", "dtype": "float32"}, {"name": "691", "dtype": "float32"}, {"name": "692", "dtype": "float32"}, {"name": "693", "dtype": "float32"}, {"name": "694", "dtype": "float32"}, {"name": "695", "dtype": "float32"}, {"name": "696", "dtype": "float32"}, {"name": "697", "dtype": "float32"}, {"name": "698", "dtype": "float32"}, {"name": "699", "dtype": "float32"}, {"name": "700", "dtype": "float32"}, {"name": "701", "dtype": "float32"}, {"name": "702", "dtype": "float32"}, {"name": "703", "dtype": "float32"}, {"name": "704", "dtype": "float32"}, {"name": "705", "dtype": "float32"}, {"name": "706", "dtype": "float32"}, {"name": "707", "dtype": "float32"}, {"name": "708", "dtype": "float32"}, {"name": "709", "dtype": "float32"}, {"name": "710", "dtype": "float32"}, {"name": "711", "dtype": "float32"}, {"name": "712", "dtype": "float32"}, {"name": "713", "dtype": "float32"}, {"name": "714", "dtype": "float32"}, {"name": "715", "dtype": "float32"}, {"name": "716", "dtype": "float32"}, {"name": "717", "dtype": "float32"}, {"name": "718", "dtype": "float32"}, {"name": "719", "dtype": "float32"}, {"name": "720", "dtype": "float32"}, {"name": "721", "dtype": "float32"}, {"name": "722", "dtype": "float32"}, {"name": "723", "dtype": "float32"}, {"name": "724", "dtype": "float32"}, {"name": "725", "dtype": "float32"}, {"name": "726", "dtype": "float32"}, {"name": "727", "dtype": "float32"}, {"name": "728", "dtype": "float32"}, {"name": "729", "dtype": "float32"}, {"name": "730", "dtype": "float32"}, {"name": "731", "dtype": "float32"}, {"name": "732", "dtype": "float32"}, {"name": "733", "dtype": "float32"}, {"name": "734", "dtype": "float32"}, {"name": "735", "dtype": "float32"}, {"name": "736", "dtype": "float32"}, {"name": "737", "dtype": "float32"}, {"name": "738", "dtype": "float32"}, {"name": "739", "dtype": "float32"}, {"name": "740", "dtype": "float32"}, {"name": "741", "dtype": "float32"}, {"name": "742", "dtype": "float32"}, {"name": "743", "dtype": "float32"}, {"name": "744", "dtype": "float32"}, {"name": "745", "dtype": "float32"}, {"name": "746", "dtype": "float32"}, {"name": "747", "dtype": "float32"}, {"name": "748", "dtype": "float32"}, {"name": "749", "dtype": "float32"}, {"name": "750", "dtype": "float32"}, {"name": "751", "dtype": "float32"}, {"name": "752", "dtype": "float32"}, {"name": "753", "dtype": "float32"}, {"name": "754", "dtype": "float32"}, {"name": "755", "dtype": "float32"}, {"name": "756", "dtype": "float32"}, {"name": "757", "dtype": "float32"}, {"name": "758", "dtype": "float32"}, {"name": "759", "dtype": "float32"}, {"name": "760", "dtype": "float32"}, {"name": "761", "dtype": "float32"}, {"name": "762", "dtype": "float32"}, {"name": "763", "dtype": "float32"}, {"name": "764", "dtype": "float32"}, {"name": "765", "dtype": "float32"}, {"name": 
"766", "dtype": "float32"}, {"name": "767", "dtype": "float32"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 115650065.625, "num_examples": 37500}, {"name": "test", "num_bytes": 38550020.0, "num_examples": 12500}], "download_size": 211761700, "dataset_size": 154200085.625}}
2023-08-18T16:21:29+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Spirit_BERT_Baseline" More Information needed
[ "# Dataset Card for \"Spirit_BERT_Baseline\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Spirit_BERT_Baseline\"\n\nMore Information needed" ]
[ 6, 19 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"Spirit_BERT_Baseline\"\n\nMore Information needed" ]
1f5192f33067c46c4937a1dd8e9a9e58c0beb939
# Dataset Card for "vishing_data" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
BaekRok/vishing_data
[ "region:us" ]
2023-08-18T16:20:33+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "validation", "path": "data/validation-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "audio", "dtype": "audio"}, {"name": "labels", "list": [{"name": "start", "dtype": "float64"}, {"name": "text", "dtype": "string"}, {"name": "end", "dtype": "float64"}, {"name": "speaker", "dtype": "string"}, {"name": "label", "dtype": "string"}, {"name": "seg_num", "dtype": "int64"}, {"name": "total_seg", "dtype": "int64"}]}, {"name": "prob", "dtype": "float64"}], "splits": [{"name": "train", "num_bytes": 48988768453.712, "num_examples": 16496}, {"name": "validation", "num_bytes": 8026214010.768, "num_examples": 2071}, {"name": "test", "num_bytes": 8851253927.312, "num_examples": 2156}], "download_size": 16900478025, "dataset_size": 65866236391.79199}}
2023-08-19T00:24:22+00:00
[]
[]
TAGS #region-us
# Dataset Card for "vishing_data" More Information needed
[ "# Dataset Card for \"vishing_data\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"vishing_data\"\n\nMore Information needed" ]
[ 6, 14 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"vishing_data\"\n\nMore Information needed" ]
a63f87502e3d2f7ae439464854afb197cab12510
# Dataset Card for "Spirit_RoBERTa_Baseline" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
EgilKarlsen/Spirit_RoBERTa_Baseline
[ "region:us" ]
2023-08-18T16:22:40+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "0", "dtype": "float32"}, {"name": "1", "dtype": "float32"}, {"name": "2", "dtype": "float32"}, {"name": "3", "dtype": "float32"}, {"name": "4", "dtype": "float32"}, {"name": "5", "dtype": "float32"}, {"name": "6", "dtype": "float32"}, {"name": "7", "dtype": "float32"}, {"name": "8", "dtype": "float32"}, {"name": "9", "dtype": "float32"}, {"name": "10", "dtype": "float32"}, {"name": "11", "dtype": "float32"}, {"name": "12", "dtype": "float32"}, {"name": "13", "dtype": "float32"}, {"name": "14", "dtype": "float32"}, {"name": "15", "dtype": "float32"}, {"name": "16", "dtype": "float32"}, {"name": "17", "dtype": "float32"}, {"name": "18", "dtype": "float32"}, {"name": "19", "dtype": "float32"}, {"name": "20", "dtype": "float32"}, {"name": "21", "dtype": "float32"}, {"name": "22", "dtype": "float32"}, {"name": "23", "dtype": "float32"}, {"name": "24", "dtype": "float32"}, {"name": "25", "dtype": "float32"}, {"name": "26", "dtype": "float32"}, {"name": "27", "dtype": "float32"}, {"name": "28", "dtype": "float32"}, {"name": "29", "dtype": "float32"}, {"name": "30", "dtype": "float32"}, {"name": "31", "dtype": "float32"}, {"name": "32", "dtype": "float32"}, {"name": "33", "dtype": "float32"}, {"name": "34", "dtype": "float32"}, {"name": "35", "dtype": "float32"}, {"name": "36", "dtype": "float32"}, {"name": "37", "dtype": "float32"}, {"name": "38", "dtype": "float32"}, {"name": "39", "dtype": "float32"}, {"name": "40", "dtype": "float32"}, {"name": "41", "dtype": "float32"}, {"name": "42", "dtype": "float32"}, {"name": "43", "dtype": "float32"}, {"name": "44", "dtype": "float32"}, {"name": "45", "dtype": "float32"}, {"name": "46", "dtype": "float32"}, {"name": "47", "dtype": "float32"}, {"name": "48", "dtype": "float32"}, {"name": "49", "dtype": "float32"}, {"name": "50", "dtype": "float32"}, {"name": "51", "dtype": "float32"}, {"name": "52", "dtype": "float32"}, {"name": "53", "dtype": "float32"}, {"name": "54", "dtype": "float32"}, {"name": "55", "dtype": "float32"}, {"name": "56", "dtype": "float32"}, {"name": "57", "dtype": "float32"}, {"name": "58", "dtype": "float32"}, {"name": "59", "dtype": "float32"}, {"name": "60", "dtype": "float32"}, {"name": "61", "dtype": "float32"}, {"name": "62", "dtype": "float32"}, {"name": "63", "dtype": "float32"}, {"name": "64", "dtype": "float32"}, {"name": "65", "dtype": "float32"}, {"name": "66", "dtype": "float32"}, {"name": "67", "dtype": "float32"}, {"name": "68", "dtype": "float32"}, {"name": "69", "dtype": "float32"}, {"name": "70", "dtype": "float32"}, {"name": "71", "dtype": "float32"}, {"name": "72", "dtype": "float32"}, {"name": "73", "dtype": "float32"}, {"name": "74", "dtype": "float32"}, {"name": "75", "dtype": "float32"}, {"name": "76", "dtype": "float32"}, {"name": "77", "dtype": "float32"}, {"name": "78", "dtype": "float32"}, {"name": "79", "dtype": "float32"}, {"name": "80", "dtype": "float32"}, {"name": "81", "dtype": "float32"}, {"name": "82", "dtype": "float32"}, {"name": "83", "dtype": "float32"}, {"name": "84", "dtype": "float32"}, {"name": "85", "dtype": "float32"}, {"name": "86", "dtype": "float32"}, {"name": "87", "dtype": "float32"}, {"name": "88", "dtype": "float32"}, {"name": "89", "dtype": "float32"}, {"name": "90", "dtype": "float32"}, {"name": "91", "dtype": "float32"}, {"name": "92", "dtype": "float32"}, {"name": "93", "dtype": "float32"}, 
{"name": "94", "dtype": "float32"}, {"name": "95", "dtype": "float32"}, {"name": "96", "dtype": "float32"}, {"name": "97", "dtype": "float32"}, {"name": "98", "dtype": "float32"}, {"name": "99", "dtype": "float32"}, {"name": "100", "dtype": "float32"}, {"name": "101", "dtype": "float32"}, {"name": "102", "dtype": "float32"}, {"name": "103", "dtype": "float32"}, {"name": "104", "dtype": "float32"}, {"name": "105", "dtype": "float32"}, {"name": "106", "dtype": "float32"}, {"name": "107", "dtype": "float32"}, {"name": "108", "dtype": "float32"}, {"name": "109", "dtype": "float32"}, {"name": "110", "dtype": "float32"}, {"name": "111", "dtype": "float32"}, {"name": "112", "dtype": "float32"}, {"name": "113", "dtype": "float32"}, {"name": "114", "dtype": "float32"}, {"name": "115", "dtype": "float32"}, {"name": "116", "dtype": "float32"}, {"name": "117", "dtype": "float32"}, {"name": "118", "dtype": "float32"}, {"name": "119", "dtype": "float32"}, {"name": "120", "dtype": "float32"}, {"name": "121", "dtype": "float32"}, {"name": "122", "dtype": "float32"}, {"name": "123", "dtype": "float32"}, {"name": "124", "dtype": "float32"}, {"name": "125", "dtype": "float32"}, {"name": "126", "dtype": "float32"}, {"name": "127", "dtype": "float32"}, {"name": "128", "dtype": "float32"}, {"name": "129", "dtype": "float32"}, {"name": "130", "dtype": "float32"}, {"name": "131", "dtype": "float32"}, {"name": "132", "dtype": "float32"}, {"name": "133", "dtype": "float32"}, {"name": "134", "dtype": "float32"}, {"name": "135", "dtype": "float32"}, {"name": "136", "dtype": "float32"}, {"name": "137", "dtype": "float32"}, {"name": "138", "dtype": "float32"}, {"name": "139", "dtype": "float32"}, {"name": "140", "dtype": "float32"}, {"name": "141", "dtype": "float32"}, {"name": "142", "dtype": "float32"}, {"name": "143", "dtype": "float32"}, {"name": "144", "dtype": "float32"}, {"name": "145", "dtype": "float32"}, {"name": "146", "dtype": "float32"}, {"name": "147", "dtype": "float32"}, {"name": "148", "dtype": "float32"}, {"name": "149", "dtype": "float32"}, {"name": "150", "dtype": "float32"}, {"name": "151", "dtype": "float32"}, {"name": "152", "dtype": "float32"}, {"name": "153", "dtype": "float32"}, {"name": "154", "dtype": "float32"}, {"name": "155", "dtype": "float32"}, {"name": "156", "dtype": "float32"}, {"name": "157", "dtype": "float32"}, {"name": "158", "dtype": "float32"}, {"name": "159", "dtype": "float32"}, {"name": "160", "dtype": "float32"}, {"name": "161", "dtype": "float32"}, {"name": "162", "dtype": "float32"}, {"name": "163", "dtype": "float32"}, {"name": "164", "dtype": "float32"}, {"name": "165", "dtype": "float32"}, {"name": "166", "dtype": "float32"}, {"name": "167", "dtype": "float32"}, {"name": "168", "dtype": "float32"}, {"name": "169", "dtype": "float32"}, {"name": "170", "dtype": "float32"}, {"name": "171", "dtype": "float32"}, {"name": "172", "dtype": "float32"}, {"name": "173", "dtype": "float32"}, {"name": "174", "dtype": "float32"}, {"name": "175", "dtype": "float32"}, {"name": "176", "dtype": "float32"}, {"name": "177", "dtype": "float32"}, {"name": "178", "dtype": "float32"}, {"name": "179", "dtype": "float32"}, {"name": "180", "dtype": "float32"}, {"name": "181", "dtype": "float32"}, {"name": "182", "dtype": "float32"}, {"name": "183", "dtype": "float32"}, {"name": "184", "dtype": "float32"}, {"name": "185", "dtype": "float32"}, {"name": "186", "dtype": "float32"}, {"name": "187", "dtype": "float32"}, {"name": "188", "dtype": "float32"}, {"name": "189", "dtype": "float32"}, {"name": 
"190", "dtype": "float32"}, {"name": "191", "dtype": "float32"}, {"name": "192", "dtype": "float32"}, {"name": "193", "dtype": "float32"}, {"name": "194", "dtype": "float32"}, {"name": "195", "dtype": "float32"}, {"name": "196", "dtype": "float32"}, {"name": "197", "dtype": "float32"}, {"name": "198", "dtype": "float32"}, {"name": "199", "dtype": "float32"}, {"name": "200", "dtype": "float32"}, {"name": "201", "dtype": "float32"}, {"name": "202", "dtype": "float32"}, {"name": "203", "dtype": "float32"}, {"name": "204", "dtype": "float32"}, {"name": "205", "dtype": "float32"}, {"name": "206", "dtype": "float32"}, {"name": "207", "dtype": "float32"}, {"name": "208", "dtype": "float32"}, {"name": "209", "dtype": "float32"}, {"name": "210", "dtype": "float32"}, {"name": "211", "dtype": "float32"}, {"name": "212", "dtype": "float32"}, {"name": "213", "dtype": "float32"}, {"name": "214", "dtype": "float32"}, {"name": "215", "dtype": "float32"}, {"name": "216", "dtype": "float32"}, {"name": "217", "dtype": "float32"}, {"name": "218", "dtype": "float32"}, {"name": "219", "dtype": "float32"}, {"name": "220", "dtype": "float32"}, {"name": "221", "dtype": "float32"}, {"name": "222", "dtype": "float32"}, {"name": "223", "dtype": "float32"}, {"name": "224", "dtype": "float32"}, {"name": "225", "dtype": "float32"}, {"name": "226", "dtype": "float32"}, {"name": "227", "dtype": "float32"}, {"name": "228", "dtype": "float32"}, {"name": "229", "dtype": "float32"}, {"name": "230", "dtype": "float32"}, {"name": "231", "dtype": "float32"}, {"name": "232", "dtype": "float32"}, {"name": "233", "dtype": "float32"}, {"name": "234", "dtype": "float32"}, {"name": "235", "dtype": "float32"}, {"name": "236", "dtype": "float32"}, {"name": "237", "dtype": "float32"}, {"name": "238", "dtype": "float32"}, {"name": "239", "dtype": "float32"}, {"name": "240", "dtype": "float32"}, {"name": "241", "dtype": "float32"}, {"name": "242", "dtype": "float32"}, {"name": "243", "dtype": "float32"}, {"name": "244", "dtype": "float32"}, {"name": "245", "dtype": "float32"}, {"name": "246", "dtype": "float32"}, {"name": "247", "dtype": "float32"}, {"name": "248", "dtype": "float32"}, {"name": "249", "dtype": "float32"}, {"name": "250", "dtype": "float32"}, {"name": "251", "dtype": "float32"}, {"name": "252", "dtype": "float32"}, {"name": "253", "dtype": "float32"}, {"name": "254", "dtype": "float32"}, {"name": "255", "dtype": "float32"}, {"name": "256", "dtype": "float32"}, {"name": "257", "dtype": "float32"}, {"name": "258", "dtype": "float32"}, {"name": "259", "dtype": "float32"}, {"name": "260", "dtype": "float32"}, {"name": "261", "dtype": "float32"}, {"name": "262", "dtype": "float32"}, {"name": "263", "dtype": "float32"}, {"name": "264", "dtype": "float32"}, {"name": "265", "dtype": "float32"}, {"name": "266", "dtype": "float32"}, {"name": "267", "dtype": "float32"}, {"name": "268", "dtype": "float32"}, {"name": "269", "dtype": "float32"}, {"name": "270", "dtype": "float32"}, {"name": "271", "dtype": "float32"}, {"name": "272", "dtype": "float32"}, {"name": "273", "dtype": "float32"}, {"name": "274", "dtype": "float32"}, {"name": "275", "dtype": "float32"}, {"name": "276", "dtype": "float32"}, {"name": "277", "dtype": "float32"}, {"name": "278", "dtype": "float32"}, {"name": "279", "dtype": "float32"}, {"name": "280", "dtype": "float32"}, {"name": "281", "dtype": "float32"}, {"name": "282", "dtype": "float32"}, {"name": "283", "dtype": "float32"}, {"name": "284", "dtype": "float32"}, {"name": "285", "dtype": "float32"}, {"name": 
"286", "dtype": "float32"}, {"name": "287", "dtype": "float32"}, {"name": "288", "dtype": "float32"}, {"name": "289", "dtype": "float32"}, {"name": "290", "dtype": "float32"}, {"name": "291", "dtype": "float32"}, {"name": "292", "dtype": "float32"}, {"name": "293", "dtype": "float32"}, {"name": "294", "dtype": "float32"}, {"name": "295", "dtype": "float32"}, {"name": "296", "dtype": "float32"}, {"name": "297", "dtype": "float32"}, {"name": "298", "dtype": "float32"}, {"name": "299", "dtype": "float32"}, {"name": "300", "dtype": "float32"}, {"name": "301", "dtype": "float32"}, {"name": "302", "dtype": "float32"}, {"name": "303", "dtype": "float32"}, {"name": "304", "dtype": "float32"}, {"name": "305", "dtype": "float32"}, {"name": "306", "dtype": "float32"}, {"name": "307", "dtype": "float32"}, {"name": "308", "dtype": "float32"}, {"name": "309", "dtype": "float32"}, {"name": "310", "dtype": "float32"}, {"name": "311", "dtype": "float32"}, {"name": "312", "dtype": "float32"}, {"name": "313", "dtype": "float32"}, {"name": "314", "dtype": "float32"}, {"name": "315", "dtype": "float32"}, {"name": "316", "dtype": "float32"}, {"name": "317", "dtype": "float32"}, {"name": "318", "dtype": "float32"}, {"name": "319", "dtype": "float32"}, {"name": "320", "dtype": "float32"}, {"name": "321", "dtype": "float32"}, {"name": "322", "dtype": "float32"}, {"name": "323", "dtype": "float32"}, {"name": "324", "dtype": "float32"}, {"name": "325", "dtype": "float32"}, {"name": "326", "dtype": "float32"}, {"name": "327", "dtype": "float32"}, {"name": "328", "dtype": "float32"}, {"name": "329", "dtype": "float32"}, {"name": "330", "dtype": "float32"}, {"name": "331", "dtype": "float32"}, {"name": "332", "dtype": "float32"}, {"name": "333", "dtype": "float32"}, {"name": "334", "dtype": "float32"}, {"name": "335", "dtype": "float32"}, {"name": "336", "dtype": "float32"}, {"name": "337", "dtype": "float32"}, {"name": "338", "dtype": "float32"}, {"name": "339", "dtype": "float32"}, {"name": "340", "dtype": "float32"}, {"name": "341", "dtype": "float32"}, {"name": "342", "dtype": "float32"}, {"name": "343", "dtype": "float32"}, {"name": "344", "dtype": "float32"}, {"name": "345", "dtype": "float32"}, {"name": "346", "dtype": "float32"}, {"name": "347", "dtype": "float32"}, {"name": "348", "dtype": "float32"}, {"name": "349", "dtype": "float32"}, {"name": "350", "dtype": "float32"}, {"name": "351", "dtype": "float32"}, {"name": "352", "dtype": "float32"}, {"name": "353", "dtype": "float32"}, {"name": "354", "dtype": "float32"}, {"name": "355", "dtype": "float32"}, {"name": "356", "dtype": "float32"}, {"name": "357", "dtype": "float32"}, {"name": "358", "dtype": "float32"}, {"name": "359", "dtype": "float32"}, {"name": "360", "dtype": "float32"}, {"name": "361", "dtype": "float32"}, {"name": "362", "dtype": "float32"}, {"name": "363", "dtype": "float32"}, {"name": "364", "dtype": "float32"}, {"name": "365", "dtype": "float32"}, {"name": "366", "dtype": "float32"}, {"name": "367", "dtype": "float32"}, {"name": "368", "dtype": "float32"}, {"name": "369", "dtype": "float32"}, {"name": "370", "dtype": "float32"}, {"name": "371", "dtype": "float32"}, {"name": "372", "dtype": "float32"}, {"name": "373", "dtype": "float32"}, {"name": "374", "dtype": "float32"}, {"name": "375", "dtype": "float32"}, {"name": "376", "dtype": "float32"}, {"name": "377", "dtype": "float32"}, {"name": "378", "dtype": "float32"}, {"name": "379", "dtype": "float32"}, {"name": "380", "dtype": "float32"}, {"name": "381", "dtype": "float32"}, {"name": 
"382", "dtype": "float32"}, {"name": "383", "dtype": "float32"}, {"name": "384", "dtype": "float32"}, {"name": "385", "dtype": "float32"}, {"name": "386", "dtype": "float32"}, {"name": "387", "dtype": "float32"}, {"name": "388", "dtype": "float32"}, {"name": "389", "dtype": "float32"}, {"name": "390", "dtype": "float32"}, {"name": "391", "dtype": "float32"}, {"name": "392", "dtype": "float32"}, {"name": "393", "dtype": "float32"}, {"name": "394", "dtype": "float32"}, {"name": "395", "dtype": "float32"}, {"name": "396", "dtype": "float32"}, {"name": "397", "dtype": "float32"}, {"name": "398", "dtype": "float32"}, {"name": "399", "dtype": "float32"}, {"name": "400", "dtype": "float32"}, {"name": "401", "dtype": "float32"}, {"name": "402", "dtype": "float32"}, {"name": "403", "dtype": "float32"}, {"name": "404", "dtype": "float32"}, {"name": "405", "dtype": "float32"}, {"name": "406", "dtype": "float32"}, {"name": "407", "dtype": "float32"}, {"name": "408", "dtype": "float32"}, {"name": "409", "dtype": "float32"}, {"name": "410", "dtype": "float32"}, {"name": "411", "dtype": "float32"}, {"name": "412", "dtype": "float32"}, {"name": "413", "dtype": "float32"}, {"name": "414", "dtype": "float32"}, {"name": "415", "dtype": "float32"}, {"name": "416", "dtype": "float32"}, {"name": "417", "dtype": "float32"}, {"name": "418", "dtype": "float32"}, {"name": "419", "dtype": "float32"}, {"name": "420", "dtype": "float32"}, {"name": "421", "dtype": "float32"}, {"name": "422", "dtype": "float32"}, {"name": "423", "dtype": "float32"}, {"name": "424", "dtype": "float32"}, {"name": "425", "dtype": "float32"}, {"name": "426", "dtype": "float32"}, {"name": "427", "dtype": "float32"}, {"name": "428", "dtype": "float32"}, {"name": "429", "dtype": "float32"}, {"name": "430", "dtype": "float32"}, {"name": "431", "dtype": "float32"}, {"name": "432", "dtype": "float32"}, {"name": "433", "dtype": "float32"}, {"name": "434", "dtype": "float32"}, {"name": "435", "dtype": "float32"}, {"name": "436", "dtype": "float32"}, {"name": "437", "dtype": "float32"}, {"name": "438", "dtype": "float32"}, {"name": "439", "dtype": "float32"}, {"name": "440", "dtype": "float32"}, {"name": "441", "dtype": "float32"}, {"name": "442", "dtype": "float32"}, {"name": "443", "dtype": "float32"}, {"name": "444", "dtype": "float32"}, {"name": "445", "dtype": "float32"}, {"name": "446", "dtype": "float32"}, {"name": "447", "dtype": "float32"}, {"name": "448", "dtype": "float32"}, {"name": "449", "dtype": "float32"}, {"name": "450", "dtype": "float32"}, {"name": "451", "dtype": "float32"}, {"name": "452", "dtype": "float32"}, {"name": "453", "dtype": "float32"}, {"name": "454", "dtype": "float32"}, {"name": "455", "dtype": "float32"}, {"name": "456", "dtype": "float32"}, {"name": "457", "dtype": "float32"}, {"name": "458", "dtype": "float32"}, {"name": "459", "dtype": "float32"}, {"name": "460", "dtype": "float32"}, {"name": "461", "dtype": "float32"}, {"name": "462", "dtype": "float32"}, {"name": "463", "dtype": "float32"}, {"name": "464", "dtype": "float32"}, {"name": "465", "dtype": "float32"}, {"name": "466", "dtype": "float32"}, {"name": "467", "dtype": "float32"}, {"name": "468", "dtype": "float32"}, {"name": "469", "dtype": "float32"}, {"name": "470", "dtype": "float32"}, {"name": "471", "dtype": "float32"}, {"name": "472", "dtype": "float32"}, {"name": "473", "dtype": "float32"}, {"name": "474", "dtype": "float32"}, {"name": "475", "dtype": "float32"}, {"name": "476", "dtype": "float32"}, {"name": "477", "dtype": "float32"}, {"name": 
"478", "dtype": "float32"}, {"name": "479", "dtype": "float32"}, {"name": "480", "dtype": "float32"}, {"name": "481", "dtype": "float32"}, {"name": "482", "dtype": "float32"}, {"name": "483", "dtype": "float32"}, {"name": "484", "dtype": "float32"}, {"name": "485", "dtype": "float32"}, {"name": "486", "dtype": "float32"}, {"name": "487", "dtype": "float32"}, {"name": "488", "dtype": "float32"}, {"name": "489", "dtype": "float32"}, {"name": "490", "dtype": "float32"}, {"name": "491", "dtype": "float32"}, {"name": "492", "dtype": "float32"}, {"name": "493", "dtype": "float32"}, {"name": "494", "dtype": "float32"}, {"name": "495", "dtype": "float32"}, {"name": "496", "dtype": "float32"}, {"name": "497", "dtype": "float32"}, {"name": "498", "dtype": "float32"}, {"name": "499", "dtype": "float32"}, {"name": "500", "dtype": "float32"}, {"name": "501", "dtype": "float32"}, {"name": "502", "dtype": "float32"}, {"name": "503", "dtype": "float32"}, {"name": "504", "dtype": "float32"}, {"name": "505", "dtype": "float32"}, {"name": "506", "dtype": "float32"}, {"name": "507", "dtype": "float32"}, {"name": "508", "dtype": "float32"}, {"name": "509", "dtype": "float32"}, {"name": "510", "dtype": "float32"}, {"name": "511", "dtype": "float32"}, {"name": "512", "dtype": "float32"}, {"name": "513", "dtype": "float32"}, {"name": "514", "dtype": "float32"}, {"name": "515", "dtype": "float32"}, {"name": "516", "dtype": "float32"}, {"name": "517", "dtype": "float32"}, {"name": "518", "dtype": "float32"}, {"name": "519", "dtype": "float32"}, {"name": "520", "dtype": "float32"}, {"name": "521", "dtype": "float32"}, {"name": "522", "dtype": "float32"}, {"name": "523", "dtype": "float32"}, {"name": "524", "dtype": "float32"}, {"name": "525", "dtype": "float32"}, {"name": "526", "dtype": "float32"}, {"name": "527", "dtype": "float32"}, {"name": "528", "dtype": "float32"}, {"name": "529", "dtype": "float32"}, {"name": "530", "dtype": "float32"}, {"name": "531", "dtype": "float32"}, {"name": "532", "dtype": "float32"}, {"name": "533", "dtype": "float32"}, {"name": "534", "dtype": "float32"}, {"name": "535", "dtype": "float32"}, {"name": "536", "dtype": "float32"}, {"name": "537", "dtype": "float32"}, {"name": "538", "dtype": "float32"}, {"name": "539", "dtype": "float32"}, {"name": "540", "dtype": "float32"}, {"name": "541", "dtype": "float32"}, {"name": "542", "dtype": "float32"}, {"name": "543", "dtype": "float32"}, {"name": "544", "dtype": "float32"}, {"name": "545", "dtype": "float32"}, {"name": "546", "dtype": "float32"}, {"name": "547", "dtype": "float32"}, {"name": "548", "dtype": "float32"}, {"name": "549", "dtype": "float32"}, {"name": "550", "dtype": "float32"}, {"name": "551", "dtype": "float32"}, {"name": "552", "dtype": "float32"}, {"name": "553", "dtype": "float32"}, {"name": "554", "dtype": "float32"}, {"name": "555", "dtype": "float32"}, {"name": "556", "dtype": "float32"}, {"name": "557", "dtype": "float32"}, {"name": "558", "dtype": "float32"}, {"name": "559", "dtype": "float32"}, {"name": "560", "dtype": "float32"}, {"name": "561", "dtype": "float32"}, {"name": "562", "dtype": "float32"}, {"name": "563", "dtype": "float32"}, {"name": "564", "dtype": "float32"}, {"name": "565", "dtype": "float32"}, {"name": "566", "dtype": "float32"}, {"name": "567", "dtype": "float32"}, {"name": "568", "dtype": "float32"}, {"name": "569", "dtype": "float32"}, {"name": "570", "dtype": "float32"}, {"name": "571", "dtype": "float32"}, {"name": "572", "dtype": "float32"}, {"name": "573", "dtype": "float32"}, {"name": 
"574", "dtype": "float32"}, {"name": "575", "dtype": "float32"}, {"name": "576", "dtype": "float32"}, {"name": "577", "dtype": "float32"}, {"name": "578", "dtype": "float32"}, {"name": "579", "dtype": "float32"}, {"name": "580", "dtype": "float32"}, {"name": "581", "dtype": "float32"}, {"name": "582", "dtype": "float32"}, {"name": "583", "dtype": "float32"}, {"name": "584", "dtype": "float32"}, {"name": "585", "dtype": "float32"}, {"name": "586", "dtype": "float32"}, {"name": "587", "dtype": "float32"}, {"name": "588", "dtype": "float32"}, {"name": "589", "dtype": "float32"}, {"name": "590", "dtype": "float32"}, {"name": "591", "dtype": "float32"}, {"name": "592", "dtype": "float32"}, {"name": "593", "dtype": "float32"}, {"name": "594", "dtype": "float32"}, {"name": "595", "dtype": "float32"}, {"name": "596", "dtype": "float32"}, {"name": "597", "dtype": "float32"}, {"name": "598", "dtype": "float32"}, {"name": "599", "dtype": "float32"}, {"name": "600", "dtype": "float32"}, {"name": "601", "dtype": "float32"}, {"name": "602", "dtype": "float32"}, {"name": "603", "dtype": "float32"}, {"name": "604", "dtype": "float32"}, {"name": "605", "dtype": "float32"}, {"name": "606", "dtype": "float32"}, {"name": "607", "dtype": "float32"}, {"name": "608", "dtype": "float32"}, {"name": "609", "dtype": "float32"}, {"name": "610", "dtype": "float32"}, {"name": "611", "dtype": "float32"}, {"name": "612", "dtype": "float32"}, {"name": "613", "dtype": "float32"}, {"name": "614", "dtype": "float32"}, {"name": "615", "dtype": "float32"}, {"name": "616", "dtype": "float32"}, {"name": "617", "dtype": "float32"}, {"name": "618", "dtype": "float32"}, {"name": "619", "dtype": "float32"}, {"name": "620", "dtype": "float32"}, {"name": "621", "dtype": "float32"}, {"name": "622", "dtype": "float32"}, {"name": "623", "dtype": "float32"}, {"name": "624", "dtype": "float32"}, {"name": "625", "dtype": "float32"}, {"name": "626", "dtype": "float32"}, {"name": "627", "dtype": "float32"}, {"name": "628", "dtype": "float32"}, {"name": "629", "dtype": "float32"}, {"name": "630", "dtype": "float32"}, {"name": "631", "dtype": "float32"}, {"name": "632", "dtype": "float32"}, {"name": "633", "dtype": "float32"}, {"name": "634", "dtype": "float32"}, {"name": "635", "dtype": "float32"}, {"name": "636", "dtype": "float32"}, {"name": "637", "dtype": "float32"}, {"name": "638", "dtype": "float32"}, {"name": "639", "dtype": "float32"}, {"name": "640", "dtype": "float32"}, {"name": "641", "dtype": "float32"}, {"name": "642", "dtype": "float32"}, {"name": "643", "dtype": "float32"}, {"name": "644", "dtype": "float32"}, {"name": "645", "dtype": "float32"}, {"name": "646", "dtype": "float32"}, {"name": "647", "dtype": "float32"}, {"name": "648", "dtype": "float32"}, {"name": "649", "dtype": "float32"}, {"name": "650", "dtype": "float32"}, {"name": "651", "dtype": "float32"}, {"name": "652", "dtype": "float32"}, {"name": "653", "dtype": "float32"}, {"name": "654", "dtype": "float32"}, {"name": "655", "dtype": "float32"}, {"name": "656", "dtype": "float32"}, {"name": "657", "dtype": "float32"}, {"name": "658", "dtype": "float32"}, {"name": "659", "dtype": "float32"}, {"name": "660", "dtype": "float32"}, {"name": "661", "dtype": "float32"}, {"name": "662", "dtype": "float32"}, {"name": "663", "dtype": "float32"}, {"name": "664", "dtype": "float32"}, {"name": "665", "dtype": "float32"}, {"name": "666", "dtype": "float32"}, {"name": "667", "dtype": "float32"}, {"name": "668", "dtype": "float32"}, {"name": "669", "dtype": "float32"}, {"name": 
"670", "dtype": "float32"}, {"name": "671", "dtype": "float32"}, {"name": "672", "dtype": "float32"}, {"name": "673", "dtype": "float32"}, {"name": "674", "dtype": "float32"}, {"name": "675", "dtype": "float32"}, {"name": "676", "dtype": "float32"}, {"name": "677", "dtype": "float32"}, {"name": "678", "dtype": "float32"}, {"name": "679", "dtype": "float32"}, {"name": "680", "dtype": "float32"}, {"name": "681", "dtype": "float32"}, {"name": "682", "dtype": "float32"}, {"name": "683", "dtype": "float32"}, {"name": "684", "dtype": "float32"}, {"name": "685", "dtype": "float32"}, {"name": "686", "dtype": "float32"}, {"name": "687", "dtype": "float32"}, {"name": "688", "dtype": "float32"}, {"name": "689", "dtype": "float32"}, {"name": "690", "dtype": "float32"}, {"name": "691", "dtype": "float32"}, {"name": "692", "dtype": "float32"}, {"name": "693", "dtype": "float32"}, {"name": "694", "dtype": "float32"}, {"name": "695", "dtype": "float32"}, {"name": "696", "dtype": "float32"}, {"name": "697", "dtype": "float32"}, {"name": "698", "dtype": "float32"}, {"name": "699", "dtype": "float32"}, {"name": "700", "dtype": "float32"}, {"name": "701", "dtype": "float32"}, {"name": "702", "dtype": "float32"}, {"name": "703", "dtype": "float32"}, {"name": "704", "dtype": "float32"}, {"name": "705", "dtype": "float32"}, {"name": "706", "dtype": "float32"}, {"name": "707", "dtype": "float32"}, {"name": "708", "dtype": "float32"}, {"name": "709", "dtype": "float32"}, {"name": "710", "dtype": "float32"}, {"name": "711", "dtype": "float32"}, {"name": "712", "dtype": "float32"}, {"name": "713", "dtype": "float32"}, {"name": "714", "dtype": "float32"}, {"name": "715", "dtype": "float32"}, {"name": "716", "dtype": "float32"}, {"name": "717", "dtype": "float32"}, {"name": "718", "dtype": "float32"}, {"name": "719", "dtype": "float32"}, {"name": "720", "dtype": "float32"}, {"name": "721", "dtype": "float32"}, {"name": "722", "dtype": "float32"}, {"name": "723", "dtype": "float32"}, {"name": "724", "dtype": "float32"}, {"name": "725", "dtype": "float32"}, {"name": "726", "dtype": "float32"}, {"name": "727", "dtype": "float32"}, {"name": "728", "dtype": "float32"}, {"name": "729", "dtype": "float32"}, {"name": "730", "dtype": "float32"}, {"name": "731", "dtype": "float32"}, {"name": "732", "dtype": "float32"}, {"name": "733", "dtype": "float32"}, {"name": "734", "dtype": "float32"}, {"name": "735", "dtype": "float32"}, {"name": "736", "dtype": "float32"}, {"name": "737", "dtype": "float32"}, {"name": "738", "dtype": "float32"}, {"name": "739", "dtype": "float32"}, {"name": "740", "dtype": "float32"}, {"name": "741", "dtype": "float32"}, {"name": "742", "dtype": "float32"}, {"name": "743", "dtype": "float32"}, {"name": "744", "dtype": "float32"}, {"name": "745", "dtype": "float32"}, {"name": "746", "dtype": "float32"}, {"name": "747", "dtype": "float32"}, {"name": "748", "dtype": "float32"}, {"name": "749", "dtype": "float32"}, {"name": "750", "dtype": "float32"}, {"name": "751", "dtype": "float32"}, {"name": "752", "dtype": "float32"}, {"name": "753", "dtype": "float32"}, {"name": "754", "dtype": "float32"}, {"name": "755", "dtype": "float32"}, {"name": "756", "dtype": "float32"}, {"name": "757", "dtype": "float32"}, {"name": "758", "dtype": "float32"}, {"name": "759", "dtype": "float32"}, {"name": "760", "dtype": "float32"}, {"name": "761", "dtype": "float32"}, {"name": "762", "dtype": "float32"}, {"name": "763", "dtype": "float32"}, {"name": "764", "dtype": "float32"}, {"name": "765", "dtype": "float32"}, {"name": 
"766", "dtype": "float32"}, {"name": "767", "dtype": "float32"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 115650065.625, "num_examples": 37500}, {"name": "test", "num_bytes": 38550020.0, "num_examples": 12500}], "download_size": 211789418, "dataset_size": 154200085.625}}
2023-08-18T16:28:06+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Spirit_RoBERTa_Baseline" More Information needed
[ "# Dataset Card for \"Spirit_RoBERTa_Baseline\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Spirit_RoBERTa_Baseline\"\n\nMore Information needed" ]
[ 6, 20 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"Spirit_RoBERTa_Baseline\"\n\nMore Information needed" ]
28145acec6cea3a3387715e4d051558e2b1a4e73
# Dataset Card for "Spirit_DistilRoBERTa_Baseline" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
EgilKarlsen/Spirit_DistilRoBERTa_Baseline
[ "region:us" ]
2023-08-18T16:28:53+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "0", "dtype": "float32"}, {"name": "1", "dtype": "float32"}, {"name": "2", "dtype": "float32"}, {"name": "3", "dtype": "float32"}, {"name": "4", "dtype": "float32"}, {"name": "5", "dtype": "float32"}, {"name": "6", "dtype": "float32"}, {"name": "7", "dtype": "float32"}, {"name": "8", "dtype": "float32"}, {"name": "9", "dtype": "float32"}, {"name": "10", "dtype": "float32"}, {"name": "11", "dtype": "float32"}, {"name": "12", "dtype": "float32"}, {"name": "13", "dtype": "float32"}, {"name": "14", "dtype": "float32"}, {"name": "15", "dtype": "float32"}, {"name": "16", "dtype": "float32"}, {"name": "17", "dtype": "float32"}, {"name": "18", "dtype": "float32"}, {"name": "19", "dtype": "float32"}, {"name": "20", "dtype": "float32"}, {"name": "21", "dtype": "float32"}, {"name": "22", "dtype": "float32"}, {"name": "23", "dtype": "float32"}, {"name": "24", "dtype": "float32"}, {"name": "25", "dtype": "float32"}, {"name": "26", "dtype": "float32"}, {"name": "27", "dtype": "float32"}, {"name": "28", "dtype": "float32"}, {"name": "29", "dtype": "float32"}, {"name": "30", "dtype": "float32"}, {"name": "31", "dtype": "float32"}, {"name": "32", "dtype": "float32"}, {"name": "33", "dtype": "float32"}, {"name": "34", "dtype": "float32"}, {"name": "35", "dtype": "float32"}, {"name": "36", "dtype": "float32"}, {"name": "37", "dtype": "float32"}, {"name": "38", "dtype": "float32"}, {"name": "39", "dtype": "float32"}, {"name": "40", "dtype": "float32"}, {"name": "41", "dtype": "float32"}, {"name": "42", "dtype": "float32"}, {"name": "43", "dtype": "float32"}, {"name": "44", "dtype": "float32"}, {"name": "45", "dtype": "float32"}, {"name": "46", "dtype": "float32"}, {"name": "47", "dtype": "float32"}, {"name": "48", "dtype": "float32"}, {"name": "49", "dtype": "float32"}, {"name": "50", "dtype": "float32"}, {"name": "51", "dtype": "float32"}, {"name": "52", "dtype": "float32"}, {"name": "53", "dtype": "float32"}, {"name": "54", "dtype": "float32"}, {"name": "55", "dtype": "float32"}, {"name": "56", "dtype": "float32"}, {"name": "57", "dtype": "float32"}, {"name": "58", "dtype": "float32"}, {"name": "59", "dtype": "float32"}, {"name": "60", "dtype": "float32"}, {"name": "61", "dtype": "float32"}, {"name": "62", "dtype": "float32"}, {"name": "63", "dtype": "float32"}, {"name": "64", "dtype": "float32"}, {"name": "65", "dtype": "float32"}, {"name": "66", "dtype": "float32"}, {"name": "67", "dtype": "float32"}, {"name": "68", "dtype": "float32"}, {"name": "69", "dtype": "float32"}, {"name": "70", "dtype": "float32"}, {"name": "71", "dtype": "float32"}, {"name": "72", "dtype": "float32"}, {"name": "73", "dtype": "float32"}, {"name": "74", "dtype": "float32"}, {"name": "75", "dtype": "float32"}, {"name": "76", "dtype": "float32"}, {"name": "77", "dtype": "float32"}, {"name": "78", "dtype": "float32"}, {"name": "79", "dtype": "float32"}, {"name": "80", "dtype": "float32"}, {"name": "81", "dtype": "float32"}, {"name": "82", "dtype": "float32"}, {"name": "83", "dtype": "float32"}, {"name": "84", "dtype": "float32"}, {"name": "85", "dtype": "float32"}, {"name": "86", "dtype": "float32"}, {"name": "87", "dtype": "float32"}, {"name": "88", "dtype": "float32"}, {"name": "89", "dtype": "float32"}, {"name": "90", "dtype": "float32"}, {"name": "91", "dtype": "float32"}, {"name": "92", "dtype": "float32"}, {"name": "93", "dtype": "float32"}, 
{"name": "94", "dtype": "float32"}, {"name": "95", "dtype": "float32"}, {"name": "96", "dtype": "float32"}, {"name": "97", "dtype": "float32"}, {"name": "98", "dtype": "float32"}, {"name": "99", "dtype": "float32"}, {"name": "100", "dtype": "float32"}, {"name": "101", "dtype": "float32"}, {"name": "102", "dtype": "float32"}, {"name": "103", "dtype": "float32"}, {"name": "104", "dtype": "float32"}, {"name": "105", "dtype": "float32"}, {"name": "106", "dtype": "float32"}, {"name": "107", "dtype": "float32"}, {"name": "108", "dtype": "float32"}, {"name": "109", "dtype": "float32"}, {"name": "110", "dtype": "float32"}, {"name": "111", "dtype": "float32"}, {"name": "112", "dtype": "float32"}, {"name": "113", "dtype": "float32"}, {"name": "114", "dtype": "float32"}, {"name": "115", "dtype": "float32"}, {"name": "116", "dtype": "float32"}, {"name": "117", "dtype": "float32"}, {"name": "118", "dtype": "float32"}, {"name": "119", "dtype": "float32"}, {"name": "120", "dtype": "float32"}, {"name": "121", "dtype": "float32"}, {"name": "122", "dtype": "float32"}, {"name": "123", "dtype": "float32"}, {"name": "124", "dtype": "float32"}, {"name": "125", "dtype": "float32"}, {"name": "126", "dtype": "float32"}, {"name": "127", "dtype": "float32"}, {"name": "128", "dtype": "float32"}, {"name": "129", "dtype": "float32"}, {"name": "130", "dtype": "float32"}, {"name": "131", "dtype": "float32"}, {"name": "132", "dtype": "float32"}, {"name": "133", "dtype": "float32"}, {"name": "134", "dtype": "float32"}, {"name": "135", "dtype": "float32"}, {"name": "136", "dtype": "float32"}, {"name": "137", "dtype": "float32"}, {"name": "138", "dtype": "float32"}, {"name": "139", "dtype": "float32"}, {"name": "140", "dtype": "float32"}, {"name": "141", "dtype": "float32"}, {"name": "142", "dtype": "float32"}, {"name": "143", "dtype": "float32"}, {"name": "144", "dtype": "float32"}, {"name": "145", "dtype": "float32"}, {"name": "146", "dtype": "float32"}, {"name": "147", "dtype": "float32"}, {"name": "148", "dtype": "float32"}, {"name": "149", "dtype": "float32"}, {"name": "150", "dtype": "float32"}, {"name": "151", "dtype": "float32"}, {"name": "152", "dtype": "float32"}, {"name": "153", "dtype": "float32"}, {"name": "154", "dtype": "float32"}, {"name": "155", "dtype": "float32"}, {"name": "156", "dtype": "float32"}, {"name": "157", "dtype": "float32"}, {"name": "158", "dtype": "float32"}, {"name": "159", "dtype": "float32"}, {"name": "160", "dtype": "float32"}, {"name": "161", "dtype": "float32"}, {"name": "162", "dtype": "float32"}, {"name": "163", "dtype": "float32"}, {"name": "164", "dtype": "float32"}, {"name": "165", "dtype": "float32"}, {"name": "166", "dtype": "float32"}, {"name": "167", "dtype": "float32"}, {"name": "168", "dtype": "float32"}, {"name": "169", "dtype": "float32"}, {"name": "170", "dtype": "float32"}, {"name": "171", "dtype": "float32"}, {"name": "172", "dtype": "float32"}, {"name": "173", "dtype": "float32"}, {"name": "174", "dtype": "float32"}, {"name": "175", "dtype": "float32"}, {"name": "176", "dtype": "float32"}, {"name": "177", "dtype": "float32"}, {"name": "178", "dtype": "float32"}, {"name": "179", "dtype": "float32"}, {"name": "180", "dtype": "float32"}, {"name": "181", "dtype": "float32"}, {"name": "182", "dtype": "float32"}, {"name": "183", "dtype": "float32"}, {"name": "184", "dtype": "float32"}, {"name": "185", "dtype": "float32"}, {"name": "186", "dtype": "float32"}, {"name": "187", "dtype": "float32"}, {"name": "188", "dtype": "float32"}, {"name": "189", "dtype": "float32"}, {"name": 
"190", "dtype": "float32"}, {"name": "191", "dtype": "float32"}, {"name": "192", "dtype": "float32"}, {"name": "193", "dtype": "float32"}, {"name": "194", "dtype": "float32"}, {"name": "195", "dtype": "float32"}, {"name": "196", "dtype": "float32"}, {"name": "197", "dtype": "float32"}, {"name": "198", "dtype": "float32"}, {"name": "199", "dtype": "float32"}, {"name": "200", "dtype": "float32"}, {"name": "201", "dtype": "float32"}, {"name": "202", "dtype": "float32"}, {"name": "203", "dtype": "float32"}, {"name": "204", "dtype": "float32"}, {"name": "205", "dtype": "float32"}, {"name": "206", "dtype": "float32"}, {"name": "207", "dtype": "float32"}, {"name": "208", "dtype": "float32"}, {"name": "209", "dtype": "float32"}, {"name": "210", "dtype": "float32"}, {"name": "211", "dtype": "float32"}, {"name": "212", "dtype": "float32"}, {"name": "213", "dtype": "float32"}, {"name": "214", "dtype": "float32"}, {"name": "215", "dtype": "float32"}, {"name": "216", "dtype": "float32"}, {"name": "217", "dtype": "float32"}, {"name": "218", "dtype": "float32"}, {"name": "219", "dtype": "float32"}, {"name": "220", "dtype": "float32"}, {"name": "221", "dtype": "float32"}, {"name": "222", "dtype": "float32"}, {"name": "223", "dtype": "float32"}, {"name": "224", "dtype": "float32"}, {"name": "225", "dtype": "float32"}, {"name": "226", "dtype": "float32"}, {"name": "227", "dtype": "float32"}, {"name": "228", "dtype": "float32"}, {"name": "229", "dtype": "float32"}, {"name": "230", "dtype": "float32"}, {"name": "231", "dtype": "float32"}, {"name": "232", "dtype": "float32"}, {"name": "233", "dtype": "float32"}, {"name": "234", "dtype": "float32"}, {"name": "235", "dtype": "float32"}, {"name": "236", "dtype": "float32"}, {"name": "237", "dtype": "float32"}, {"name": "238", "dtype": "float32"}, {"name": "239", "dtype": "float32"}, {"name": "240", "dtype": "float32"}, {"name": "241", "dtype": "float32"}, {"name": "242", "dtype": "float32"}, {"name": "243", "dtype": "float32"}, {"name": "244", "dtype": "float32"}, {"name": "245", "dtype": "float32"}, {"name": "246", "dtype": "float32"}, {"name": "247", "dtype": "float32"}, {"name": "248", "dtype": "float32"}, {"name": "249", "dtype": "float32"}, {"name": "250", "dtype": "float32"}, {"name": "251", "dtype": "float32"}, {"name": "252", "dtype": "float32"}, {"name": "253", "dtype": "float32"}, {"name": "254", "dtype": "float32"}, {"name": "255", "dtype": "float32"}, {"name": "256", "dtype": "float32"}, {"name": "257", "dtype": "float32"}, {"name": "258", "dtype": "float32"}, {"name": "259", "dtype": "float32"}, {"name": "260", "dtype": "float32"}, {"name": "261", "dtype": "float32"}, {"name": "262", "dtype": "float32"}, {"name": "263", "dtype": "float32"}, {"name": "264", "dtype": "float32"}, {"name": "265", "dtype": "float32"}, {"name": "266", "dtype": "float32"}, {"name": "267", "dtype": "float32"}, {"name": "268", "dtype": "float32"}, {"name": "269", "dtype": "float32"}, {"name": "270", "dtype": "float32"}, {"name": "271", "dtype": "float32"}, {"name": "272", "dtype": "float32"}, {"name": "273", "dtype": "float32"}, {"name": "274", "dtype": "float32"}, {"name": "275", "dtype": "float32"}, {"name": "276", "dtype": "float32"}, {"name": "277", "dtype": "float32"}, {"name": "278", "dtype": "float32"}, {"name": "279", "dtype": "float32"}, {"name": "280", "dtype": "float32"}, {"name": "281", "dtype": "float32"}, {"name": "282", "dtype": "float32"}, {"name": "283", "dtype": "float32"}, {"name": "284", "dtype": "float32"}, {"name": "285", "dtype": "float32"}, {"name": 
"286", "dtype": "float32"}, {"name": "287", "dtype": "float32"}, {"name": "288", "dtype": "float32"}, {"name": "289", "dtype": "float32"}, {"name": "290", "dtype": "float32"}, {"name": "291", "dtype": "float32"}, {"name": "292", "dtype": "float32"}, {"name": "293", "dtype": "float32"}, {"name": "294", "dtype": "float32"}, {"name": "295", "dtype": "float32"}, {"name": "296", "dtype": "float32"}, {"name": "297", "dtype": "float32"}, {"name": "298", "dtype": "float32"}, {"name": "299", "dtype": "float32"}, {"name": "300", "dtype": "float32"}, {"name": "301", "dtype": "float32"}, {"name": "302", "dtype": "float32"}, {"name": "303", "dtype": "float32"}, {"name": "304", "dtype": "float32"}, {"name": "305", "dtype": "float32"}, {"name": "306", "dtype": "float32"}, {"name": "307", "dtype": "float32"}, {"name": "308", "dtype": "float32"}, {"name": "309", "dtype": "float32"}, {"name": "310", "dtype": "float32"}, {"name": "311", "dtype": "float32"}, {"name": "312", "dtype": "float32"}, {"name": "313", "dtype": "float32"}, {"name": "314", "dtype": "float32"}, {"name": "315", "dtype": "float32"}, {"name": "316", "dtype": "float32"}, {"name": "317", "dtype": "float32"}, {"name": "318", "dtype": "float32"}, {"name": "319", "dtype": "float32"}, {"name": "320", "dtype": "float32"}, {"name": "321", "dtype": "float32"}, {"name": "322", "dtype": "float32"}, {"name": "323", "dtype": "float32"}, {"name": "324", "dtype": "float32"}, {"name": "325", "dtype": "float32"}, {"name": "326", "dtype": "float32"}, {"name": "327", "dtype": "float32"}, {"name": "328", "dtype": "float32"}, {"name": "329", "dtype": "float32"}, {"name": "330", "dtype": "float32"}, {"name": "331", "dtype": "float32"}, {"name": "332", "dtype": "float32"}, {"name": "333", "dtype": "float32"}, {"name": "334", "dtype": "float32"}, {"name": "335", "dtype": "float32"}, {"name": "336", "dtype": "float32"}, {"name": "337", "dtype": "float32"}, {"name": "338", "dtype": "float32"}, {"name": "339", "dtype": "float32"}, {"name": "340", "dtype": "float32"}, {"name": "341", "dtype": "float32"}, {"name": "342", "dtype": "float32"}, {"name": "343", "dtype": "float32"}, {"name": "344", "dtype": "float32"}, {"name": "345", "dtype": "float32"}, {"name": "346", "dtype": "float32"}, {"name": "347", "dtype": "float32"}, {"name": "348", "dtype": "float32"}, {"name": "349", "dtype": "float32"}, {"name": "350", "dtype": "float32"}, {"name": "351", "dtype": "float32"}, {"name": "352", "dtype": "float32"}, {"name": "353", "dtype": "float32"}, {"name": "354", "dtype": "float32"}, {"name": "355", "dtype": "float32"}, {"name": "356", "dtype": "float32"}, {"name": "357", "dtype": "float32"}, {"name": "358", "dtype": "float32"}, {"name": "359", "dtype": "float32"}, {"name": "360", "dtype": "float32"}, {"name": "361", "dtype": "float32"}, {"name": "362", "dtype": "float32"}, {"name": "363", "dtype": "float32"}, {"name": "364", "dtype": "float32"}, {"name": "365", "dtype": "float32"}, {"name": "366", "dtype": "float32"}, {"name": "367", "dtype": "float32"}, {"name": "368", "dtype": "float32"}, {"name": "369", "dtype": "float32"}, {"name": "370", "dtype": "float32"}, {"name": "371", "dtype": "float32"}, {"name": "372", "dtype": "float32"}, {"name": "373", "dtype": "float32"}, {"name": "374", "dtype": "float32"}, {"name": "375", "dtype": "float32"}, {"name": "376", "dtype": "float32"}, {"name": "377", "dtype": "float32"}, {"name": "378", "dtype": "float32"}, {"name": "379", "dtype": "float32"}, {"name": "380", "dtype": "float32"}, {"name": "381", "dtype": "float32"}, {"name": 
"382", "dtype": "float32"}, {"name": "383", "dtype": "float32"}, {"name": "384", "dtype": "float32"}, {"name": "385", "dtype": "float32"}, {"name": "386", "dtype": "float32"}, {"name": "387", "dtype": "float32"}, {"name": "388", "dtype": "float32"}, {"name": "389", "dtype": "float32"}, {"name": "390", "dtype": "float32"}, {"name": "391", "dtype": "float32"}, {"name": "392", "dtype": "float32"}, {"name": "393", "dtype": "float32"}, {"name": "394", "dtype": "float32"}, {"name": "395", "dtype": "float32"}, {"name": "396", "dtype": "float32"}, {"name": "397", "dtype": "float32"}, {"name": "398", "dtype": "float32"}, {"name": "399", "dtype": "float32"}, {"name": "400", "dtype": "float32"}, {"name": "401", "dtype": "float32"}, {"name": "402", "dtype": "float32"}, {"name": "403", "dtype": "float32"}, {"name": "404", "dtype": "float32"}, {"name": "405", "dtype": "float32"}, {"name": "406", "dtype": "float32"}, {"name": "407", "dtype": "float32"}, {"name": "408", "dtype": "float32"}, {"name": "409", "dtype": "float32"}, {"name": "410", "dtype": "float32"}, {"name": "411", "dtype": "float32"}, {"name": "412", "dtype": "float32"}, {"name": "413", "dtype": "float32"}, {"name": "414", "dtype": "float32"}, {"name": "415", "dtype": "float32"}, {"name": "416", "dtype": "float32"}, {"name": "417", "dtype": "float32"}, {"name": "418", "dtype": "float32"}, {"name": "419", "dtype": "float32"}, {"name": "420", "dtype": "float32"}, {"name": "421", "dtype": "float32"}, {"name": "422", "dtype": "float32"}, {"name": "423", "dtype": "float32"}, {"name": "424", "dtype": "float32"}, {"name": "425", "dtype": "float32"}, {"name": "426", "dtype": "float32"}, {"name": "427", "dtype": "float32"}, {"name": "428", "dtype": "float32"}, {"name": "429", "dtype": "float32"}, {"name": "430", "dtype": "float32"}, {"name": "431", "dtype": "float32"}, {"name": "432", "dtype": "float32"}, {"name": "433", "dtype": "float32"}, {"name": "434", "dtype": "float32"}, {"name": "435", "dtype": "float32"}, {"name": "436", "dtype": "float32"}, {"name": "437", "dtype": "float32"}, {"name": "438", "dtype": "float32"}, {"name": "439", "dtype": "float32"}, {"name": "440", "dtype": "float32"}, {"name": "441", "dtype": "float32"}, {"name": "442", "dtype": "float32"}, {"name": "443", "dtype": "float32"}, {"name": "444", "dtype": "float32"}, {"name": "445", "dtype": "float32"}, {"name": "446", "dtype": "float32"}, {"name": "447", "dtype": "float32"}, {"name": "448", "dtype": "float32"}, {"name": "449", "dtype": "float32"}, {"name": "450", "dtype": "float32"}, {"name": "451", "dtype": "float32"}, {"name": "452", "dtype": "float32"}, {"name": "453", "dtype": "float32"}, {"name": "454", "dtype": "float32"}, {"name": "455", "dtype": "float32"}, {"name": "456", "dtype": "float32"}, {"name": "457", "dtype": "float32"}, {"name": "458", "dtype": "float32"}, {"name": "459", "dtype": "float32"}, {"name": "460", "dtype": "float32"}, {"name": "461", "dtype": "float32"}, {"name": "462", "dtype": "float32"}, {"name": "463", "dtype": "float32"}, {"name": "464", "dtype": "float32"}, {"name": "465", "dtype": "float32"}, {"name": "466", "dtype": "float32"}, {"name": "467", "dtype": "float32"}, {"name": "468", "dtype": "float32"}, {"name": "469", "dtype": "float32"}, {"name": "470", "dtype": "float32"}, {"name": "471", "dtype": "float32"}, {"name": "472", "dtype": "float32"}, {"name": "473", "dtype": "float32"}, {"name": "474", "dtype": "float32"}, {"name": "475", "dtype": "float32"}, {"name": "476", "dtype": "float32"}, {"name": "477", "dtype": "float32"}, {"name": 
"478", "dtype": "float32"}, {"name": "479", "dtype": "float32"}, {"name": "480", "dtype": "float32"}, {"name": "481", "dtype": "float32"}, {"name": "482", "dtype": "float32"}, {"name": "483", "dtype": "float32"}, {"name": "484", "dtype": "float32"}, {"name": "485", "dtype": "float32"}, {"name": "486", "dtype": "float32"}, {"name": "487", "dtype": "float32"}, {"name": "488", "dtype": "float32"}, {"name": "489", "dtype": "float32"}, {"name": "490", "dtype": "float32"}, {"name": "491", "dtype": "float32"}, {"name": "492", "dtype": "float32"}, {"name": "493", "dtype": "float32"}, {"name": "494", "dtype": "float32"}, {"name": "495", "dtype": "float32"}, {"name": "496", "dtype": "float32"}, {"name": "497", "dtype": "float32"}, {"name": "498", "dtype": "float32"}, {"name": "499", "dtype": "float32"}, {"name": "500", "dtype": "float32"}, {"name": "501", "dtype": "float32"}, {"name": "502", "dtype": "float32"}, {"name": "503", "dtype": "float32"}, {"name": "504", "dtype": "float32"}, {"name": "505", "dtype": "float32"}, {"name": "506", "dtype": "float32"}, {"name": "507", "dtype": "float32"}, {"name": "508", "dtype": "float32"}, {"name": "509", "dtype": "float32"}, {"name": "510", "dtype": "float32"}, {"name": "511", "dtype": "float32"}, {"name": "512", "dtype": "float32"}, {"name": "513", "dtype": "float32"}, {"name": "514", "dtype": "float32"}, {"name": "515", "dtype": "float32"}, {"name": "516", "dtype": "float32"}, {"name": "517", "dtype": "float32"}, {"name": "518", "dtype": "float32"}, {"name": "519", "dtype": "float32"}, {"name": "520", "dtype": "float32"}, {"name": "521", "dtype": "float32"}, {"name": "522", "dtype": "float32"}, {"name": "523", "dtype": "float32"}, {"name": "524", "dtype": "float32"}, {"name": "525", "dtype": "float32"}, {"name": "526", "dtype": "float32"}, {"name": "527", "dtype": "float32"}, {"name": "528", "dtype": "float32"}, {"name": "529", "dtype": "float32"}, {"name": "530", "dtype": "float32"}, {"name": "531", "dtype": "float32"}, {"name": "532", "dtype": "float32"}, {"name": "533", "dtype": "float32"}, {"name": "534", "dtype": "float32"}, {"name": "535", "dtype": "float32"}, {"name": "536", "dtype": "float32"}, {"name": "537", "dtype": "float32"}, {"name": "538", "dtype": "float32"}, {"name": "539", "dtype": "float32"}, {"name": "540", "dtype": "float32"}, {"name": "541", "dtype": "float32"}, {"name": "542", "dtype": "float32"}, {"name": "543", "dtype": "float32"}, {"name": "544", "dtype": "float32"}, {"name": "545", "dtype": "float32"}, {"name": "546", "dtype": "float32"}, {"name": "547", "dtype": "float32"}, {"name": "548", "dtype": "float32"}, {"name": "549", "dtype": "float32"}, {"name": "550", "dtype": "float32"}, {"name": "551", "dtype": "float32"}, {"name": "552", "dtype": "float32"}, {"name": "553", "dtype": "float32"}, {"name": "554", "dtype": "float32"}, {"name": "555", "dtype": "float32"}, {"name": "556", "dtype": "float32"}, {"name": "557", "dtype": "float32"}, {"name": "558", "dtype": "float32"}, {"name": "559", "dtype": "float32"}, {"name": "560", "dtype": "float32"}, {"name": "561", "dtype": "float32"}, {"name": "562", "dtype": "float32"}, {"name": "563", "dtype": "float32"}, {"name": "564", "dtype": "float32"}, {"name": "565", "dtype": "float32"}, {"name": "566", "dtype": "float32"}, {"name": "567", "dtype": "float32"}, {"name": "568", "dtype": "float32"}, {"name": "569", "dtype": "float32"}, {"name": "570", "dtype": "float32"}, {"name": "571", "dtype": "float32"}, {"name": "572", "dtype": "float32"}, {"name": "573", "dtype": "float32"}, {"name": 
"574", "dtype": "float32"}, {"name": "575", "dtype": "float32"}, {"name": "576", "dtype": "float32"}, {"name": "577", "dtype": "float32"}, {"name": "578", "dtype": "float32"}, {"name": "579", "dtype": "float32"}, {"name": "580", "dtype": "float32"}, {"name": "581", "dtype": "float32"}, {"name": "582", "dtype": "float32"}, {"name": "583", "dtype": "float32"}, {"name": "584", "dtype": "float32"}, {"name": "585", "dtype": "float32"}, {"name": "586", "dtype": "float32"}, {"name": "587", "dtype": "float32"}, {"name": "588", "dtype": "float32"}, {"name": "589", "dtype": "float32"}, {"name": "590", "dtype": "float32"}, {"name": "591", "dtype": "float32"}, {"name": "592", "dtype": "float32"}, {"name": "593", "dtype": "float32"}, {"name": "594", "dtype": "float32"}, {"name": "595", "dtype": "float32"}, {"name": "596", "dtype": "float32"}, {"name": "597", "dtype": "float32"}, {"name": "598", "dtype": "float32"}, {"name": "599", "dtype": "float32"}, {"name": "600", "dtype": "float32"}, {"name": "601", "dtype": "float32"}, {"name": "602", "dtype": "float32"}, {"name": "603", "dtype": "float32"}, {"name": "604", "dtype": "float32"}, {"name": "605", "dtype": "float32"}, {"name": "606", "dtype": "float32"}, {"name": "607", "dtype": "float32"}, {"name": "608", "dtype": "float32"}, {"name": "609", "dtype": "float32"}, {"name": "610", "dtype": "float32"}, {"name": "611", "dtype": "float32"}, {"name": "612", "dtype": "float32"}, {"name": "613", "dtype": "float32"}, {"name": "614", "dtype": "float32"}, {"name": "615", "dtype": "float32"}, {"name": "616", "dtype": "float32"}, {"name": "617", "dtype": "float32"}, {"name": "618", "dtype": "float32"}, {"name": "619", "dtype": "float32"}, {"name": "620", "dtype": "float32"}, {"name": "621", "dtype": "float32"}, {"name": "622", "dtype": "float32"}, {"name": "623", "dtype": "float32"}, {"name": "624", "dtype": "float32"}, {"name": "625", "dtype": "float32"}, {"name": "626", "dtype": "float32"}, {"name": "627", "dtype": "float32"}, {"name": "628", "dtype": "float32"}, {"name": "629", "dtype": "float32"}, {"name": "630", "dtype": "float32"}, {"name": "631", "dtype": "float32"}, {"name": "632", "dtype": "float32"}, {"name": "633", "dtype": "float32"}, {"name": "634", "dtype": "float32"}, {"name": "635", "dtype": "float32"}, {"name": "636", "dtype": "float32"}, {"name": "637", "dtype": "float32"}, {"name": "638", "dtype": "float32"}, {"name": "639", "dtype": "float32"}, {"name": "640", "dtype": "float32"}, {"name": "641", "dtype": "float32"}, {"name": "642", "dtype": "float32"}, {"name": "643", "dtype": "float32"}, {"name": "644", "dtype": "float32"}, {"name": "645", "dtype": "float32"}, {"name": "646", "dtype": "float32"}, {"name": "647", "dtype": "float32"}, {"name": "648", "dtype": "float32"}, {"name": "649", "dtype": "float32"}, {"name": "650", "dtype": "float32"}, {"name": "651", "dtype": "float32"}, {"name": "652", "dtype": "float32"}, {"name": "653", "dtype": "float32"}, {"name": "654", "dtype": "float32"}, {"name": "655", "dtype": "float32"}, {"name": "656", "dtype": "float32"}, {"name": "657", "dtype": "float32"}, {"name": "658", "dtype": "float32"}, {"name": "659", "dtype": "float32"}, {"name": "660", "dtype": "float32"}, {"name": "661", "dtype": "float32"}, {"name": "662", "dtype": "float32"}, {"name": "663", "dtype": "float32"}, {"name": "664", "dtype": "float32"}, {"name": "665", "dtype": "float32"}, {"name": "666", "dtype": "float32"}, {"name": "667", "dtype": "float32"}, {"name": "668", "dtype": "float32"}, {"name": "669", "dtype": "float32"}, {"name": 
"670", "dtype": "float32"}, {"name": "671", "dtype": "float32"}, {"name": "672", "dtype": "float32"}, {"name": "673", "dtype": "float32"}, {"name": "674", "dtype": "float32"}, {"name": "675", "dtype": "float32"}, {"name": "676", "dtype": "float32"}, {"name": "677", "dtype": "float32"}, {"name": "678", "dtype": "float32"}, {"name": "679", "dtype": "float32"}, {"name": "680", "dtype": "float32"}, {"name": "681", "dtype": "float32"}, {"name": "682", "dtype": "float32"}, {"name": "683", "dtype": "float32"}, {"name": "684", "dtype": "float32"}, {"name": "685", "dtype": "float32"}, {"name": "686", "dtype": "float32"}, {"name": "687", "dtype": "float32"}, {"name": "688", "dtype": "float32"}, {"name": "689", "dtype": "float32"}, {"name": "690", "dtype": "float32"}, {"name": "691", "dtype": "float32"}, {"name": "692", "dtype": "float32"}, {"name": "693", "dtype": "float32"}, {"name": "694", "dtype": "float32"}, {"name": "695", "dtype": "float32"}, {"name": "696", "dtype": "float32"}, {"name": "697", "dtype": "float32"}, {"name": "698", "dtype": "float32"}, {"name": "699", "dtype": "float32"}, {"name": "700", "dtype": "float32"}, {"name": "701", "dtype": "float32"}, {"name": "702", "dtype": "float32"}, {"name": "703", "dtype": "float32"}, {"name": "704", "dtype": "float32"}, {"name": "705", "dtype": "float32"}, {"name": "706", "dtype": "float32"}, {"name": "707", "dtype": "float32"}, {"name": "708", "dtype": "float32"}, {"name": "709", "dtype": "float32"}, {"name": "710", "dtype": "float32"}, {"name": "711", "dtype": "float32"}, {"name": "712", "dtype": "float32"}, {"name": "713", "dtype": "float32"}, {"name": "714", "dtype": "float32"}, {"name": "715", "dtype": "float32"}, {"name": "716", "dtype": "float32"}, {"name": "717", "dtype": "float32"}, {"name": "718", "dtype": "float32"}, {"name": "719", "dtype": "float32"}, {"name": "720", "dtype": "float32"}, {"name": "721", "dtype": "float32"}, {"name": "722", "dtype": "float32"}, {"name": "723", "dtype": "float32"}, {"name": "724", "dtype": "float32"}, {"name": "725", "dtype": "float32"}, {"name": "726", "dtype": "float32"}, {"name": "727", "dtype": "float32"}, {"name": "728", "dtype": "float32"}, {"name": "729", "dtype": "float32"}, {"name": "730", "dtype": "float32"}, {"name": "731", "dtype": "float32"}, {"name": "732", "dtype": "float32"}, {"name": "733", "dtype": "float32"}, {"name": "734", "dtype": "float32"}, {"name": "735", "dtype": "float32"}, {"name": "736", "dtype": "float32"}, {"name": "737", "dtype": "float32"}, {"name": "738", "dtype": "float32"}, {"name": "739", "dtype": "float32"}, {"name": "740", "dtype": "float32"}, {"name": "741", "dtype": "float32"}, {"name": "742", "dtype": "float32"}, {"name": "743", "dtype": "float32"}, {"name": "744", "dtype": "float32"}, {"name": "745", "dtype": "float32"}, {"name": "746", "dtype": "float32"}, {"name": "747", "dtype": "float32"}, {"name": "748", "dtype": "float32"}, {"name": "749", "dtype": "float32"}, {"name": "750", "dtype": "float32"}, {"name": "751", "dtype": "float32"}, {"name": "752", "dtype": "float32"}, {"name": "753", "dtype": "float32"}, {"name": "754", "dtype": "float32"}, {"name": "755", "dtype": "float32"}, {"name": "756", "dtype": "float32"}, {"name": "757", "dtype": "float32"}, {"name": "758", "dtype": "float32"}, {"name": "759", "dtype": "float32"}, {"name": "760", "dtype": "float32"}, {"name": "761", "dtype": "float32"}, {"name": "762", "dtype": "float32"}, {"name": "763", "dtype": "float32"}, {"name": "764", "dtype": "float32"}, {"name": "765", "dtype": "float32"}, {"name": 
"766", "dtype": "float32"}, {"name": "767", "dtype": "float32"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 115650065.625, "num_examples": 37500}, {"name": "test", "num_bytes": 38550020.0, "num_examples": 12500}], "download_size": 211786903, "dataset_size": 154200085.625}}
2023-08-18T16:34:21+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Spirit_DistilRoBERTa_Baseline" More Information needed
[ "# Dataset Card for \"Spirit_DistilRoBERTa_Baseline\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Spirit_DistilRoBERTa_Baseline\"\n\nMore Information needed" ]
[ 6, 22 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"Spirit_DistilRoBERTa_Baseline\"\n\nMore Information needed" ]
285c22f60202305cb9893eccc6162e3abea684b5
# Dataset Card for "stratio-doc" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yvillamil/stratio-doc
[ "region:us" ]
2023-08-18T16:33:48+00:00
{"dataset_info": {"features": [{"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 41578, "num_examples": 3}], "download_size": 13650, "dataset_size": 41578}}
2023-08-18T16:33:49+00:00
[]
[]
TAGS #region-us
# Dataset Card for "stratio-doc" More Information needed
[ "# Dataset Card for \"stratio-doc\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"stratio-doc\"\n\nMore Information needed" ]
[ 6, 14 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"stratio-doc\"\n\nMore Information needed" ]
7d1da88705224832de5c74e126d6c09e7259dfc6
# Dataset Card for "Spirit_GPT2_Baseline" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
EgilKarlsen/Spirit_GPT2_Baseline
[ "region:us" ]
2023-08-18T16:35:33+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "0", "dtype": "float32"}, {"name": "1", "dtype": "float32"}, {"name": "2", "dtype": "float32"}, {"name": "3", "dtype": "float32"}, {"name": "4", "dtype": "float32"}, {"name": "5", "dtype": "float32"}, {"name": "6", "dtype": "float32"}, {"name": "7", "dtype": "float32"}, {"name": "8", "dtype": "float32"}, {"name": "9", "dtype": "float32"}, {"name": "10", "dtype": "float32"}, {"name": "11", "dtype": "float32"}, {"name": "12", "dtype": "float32"}, {"name": "13", "dtype": "float32"}, {"name": "14", "dtype": "float32"}, {"name": "15", "dtype": "float32"}, {"name": "16", "dtype": "float32"}, {"name": "17", "dtype": "float32"}, {"name": "18", "dtype": "float32"}, {"name": "19", "dtype": "float32"}, {"name": "20", "dtype": "float32"}, {"name": "21", "dtype": "float32"}, {"name": "22", "dtype": "float32"}, {"name": "23", "dtype": "float32"}, {"name": "24", "dtype": "float32"}, {"name": "25", "dtype": "float32"}, {"name": "26", "dtype": "float32"}, {"name": "27", "dtype": "float32"}, {"name": "28", "dtype": "float32"}, {"name": "29", "dtype": "float32"}, {"name": "30", "dtype": "float32"}, {"name": "31", "dtype": "float32"}, {"name": "32", "dtype": "float32"}, {"name": "33", "dtype": "float32"}, {"name": "34", "dtype": "float32"}, {"name": "35", "dtype": "float32"}, {"name": "36", "dtype": "float32"}, {"name": "37", "dtype": "float32"}, {"name": "38", "dtype": "float32"}, {"name": "39", "dtype": "float32"}, {"name": "40", "dtype": "float32"}, {"name": "41", "dtype": "float32"}, {"name": "42", "dtype": "float32"}, {"name": "43", "dtype": "float32"}, {"name": "44", "dtype": "float32"}, {"name": "45", "dtype": "float32"}, {"name": "46", "dtype": "float32"}, {"name": "47", "dtype": "float32"}, {"name": "48", "dtype": "float32"}, {"name": "49", "dtype": "float32"}, {"name": "50", "dtype": "float32"}, {"name": "51", "dtype": "float32"}, {"name": "52", "dtype": "float32"}, {"name": "53", "dtype": "float32"}, {"name": "54", "dtype": "float32"}, {"name": "55", "dtype": "float32"}, {"name": "56", "dtype": "float32"}, {"name": "57", "dtype": "float32"}, {"name": "58", "dtype": "float32"}, {"name": "59", "dtype": "float32"}, {"name": "60", "dtype": "float32"}, {"name": "61", "dtype": "float32"}, {"name": "62", "dtype": "float32"}, {"name": "63", "dtype": "float32"}, {"name": "64", "dtype": "float32"}, {"name": "65", "dtype": "float32"}, {"name": "66", "dtype": "float32"}, {"name": "67", "dtype": "float32"}, {"name": "68", "dtype": "float32"}, {"name": "69", "dtype": "float32"}, {"name": "70", "dtype": "float32"}, {"name": "71", "dtype": "float32"}, {"name": "72", "dtype": "float32"}, {"name": "73", "dtype": "float32"}, {"name": "74", "dtype": "float32"}, {"name": "75", "dtype": "float32"}, {"name": "76", "dtype": "float32"}, {"name": "77", "dtype": "float32"}, {"name": "78", "dtype": "float32"}, {"name": "79", "dtype": "float32"}, {"name": "80", "dtype": "float32"}, {"name": "81", "dtype": "float32"}, {"name": "82", "dtype": "float32"}, {"name": "83", "dtype": "float32"}, {"name": "84", "dtype": "float32"}, {"name": "85", "dtype": "float32"}, {"name": "86", "dtype": "float32"}, {"name": "87", "dtype": "float32"}, {"name": "88", "dtype": "float32"}, {"name": "89", "dtype": "float32"}, {"name": "90", "dtype": "float32"}, {"name": "91", "dtype": "float32"}, {"name": "92", "dtype": "float32"}, {"name": "93", "dtype": "float32"}, 
{"name": "94", "dtype": "float32"}, {"name": "95", "dtype": "float32"}, {"name": "96", "dtype": "float32"}, {"name": "97", "dtype": "float32"}, {"name": "98", "dtype": "float32"}, {"name": "99", "dtype": "float32"}, {"name": "100", "dtype": "float32"}, {"name": "101", "dtype": "float32"}, {"name": "102", "dtype": "float32"}, {"name": "103", "dtype": "float32"}, {"name": "104", "dtype": "float32"}, {"name": "105", "dtype": "float32"}, {"name": "106", "dtype": "float32"}, {"name": "107", "dtype": "float32"}, {"name": "108", "dtype": "float32"}, {"name": "109", "dtype": "float32"}, {"name": "110", "dtype": "float32"}, {"name": "111", "dtype": "float32"}, {"name": "112", "dtype": "float32"}, {"name": "113", "dtype": "float32"}, {"name": "114", "dtype": "float32"}, {"name": "115", "dtype": "float32"}, {"name": "116", "dtype": "float32"}, {"name": "117", "dtype": "float32"}, {"name": "118", "dtype": "float32"}, {"name": "119", "dtype": "float32"}, {"name": "120", "dtype": "float32"}, {"name": "121", "dtype": "float32"}, {"name": "122", "dtype": "float32"}, {"name": "123", "dtype": "float32"}, {"name": "124", "dtype": "float32"}, {"name": "125", "dtype": "float32"}, {"name": "126", "dtype": "float32"}, {"name": "127", "dtype": "float32"}, {"name": "128", "dtype": "float32"}, {"name": "129", "dtype": "float32"}, {"name": "130", "dtype": "float32"}, {"name": "131", "dtype": "float32"}, {"name": "132", "dtype": "float32"}, {"name": "133", "dtype": "float32"}, {"name": "134", "dtype": "float32"}, {"name": "135", "dtype": "float32"}, {"name": "136", "dtype": "float32"}, {"name": "137", "dtype": "float32"}, {"name": "138", "dtype": "float32"}, {"name": "139", "dtype": "float32"}, {"name": "140", "dtype": "float32"}, {"name": "141", "dtype": "float32"}, {"name": "142", "dtype": "float32"}, {"name": "143", "dtype": "float32"}, {"name": "144", "dtype": "float32"}, {"name": "145", "dtype": "float32"}, {"name": "146", "dtype": "float32"}, {"name": "147", "dtype": "float32"}, {"name": "148", "dtype": "float32"}, {"name": "149", "dtype": "float32"}, {"name": "150", "dtype": "float32"}, {"name": "151", "dtype": "float32"}, {"name": "152", "dtype": "float32"}, {"name": "153", "dtype": "float32"}, {"name": "154", "dtype": "float32"}, {"name": "155", "dtype": "float32"}, {"name": "156", "dtype": "float32"}, {"name": "157", "dtype": "float32"}, {"name": "158", "dtype": "float32"}, {"name": "159", "dtype": "float32"}, {"name": "160", "dtype": "float32"}, {"name": "161", "dtype": "float32"}, {"name": "162", "dtype": "float32"}, {"name": "163", "dtype": "float32"}, {"name": "164", "dtype": "float32"}, {"name": "165", "dtype": "float32"}, {"name": "166", "dtype": "float32"}, {"name": "167", "dtype": "float32"}, {"name": "168", "dtype": "float32"}, {"name": "169", "dtype": "float32"}, {"name": "170", "dtype": "float32"}, {"name": "171", "dtype": "float32"}, {"name": "172", "dtype": "float32"}, {"name": "173", "dtype": "float32"}, {"name": "174", "dtype": "float32"}, {"name": "175", "dtype": "float32"}, {"name": "176", "dtype": "float32"}, {"name": "177", "dtype": "float32"}, {"name": "178", "dtype": "float32"}, {"name": "179", "dtype": "float32"}, {"name": "180", "dtype": "float32"}, {"name": "181", "dtype": "float32"}, {"name": "182", "dtype": "float32"}, {"name": "183", "dtype": "float32"}, {"name": "184", "dtype": "float32"}, {"name": "185", "dtype": "float32"}, {"name": "186", "dtype": "float32"}, {"name": "187", "dtype": "float32"}, {"name": "188", "dtype": "float32"}, {"name": "189", "dtype": "float32"}, {"name": 
"190", "dtype": "float32"}, {"name": "191", "dtype": "float32"}, {"name": "192", "dtype": "float32"}, {"name": "193", "dtype": "float32"}, {"name": "194", "dtype": "float32"}, {"name": "195", "dtype": "float32"}, {"name": "196", "dtype": "float32"}, {"name": "197", "dtype": "float32"}, {"name": "198", "dtype": "float32"}, {"name": "199", "dtype": "float32"}, {"name": "200", "dtype": "float32"}, {"name": "201", "dtype": "float32"}, {"name": "202", "dtype": "float32"}, {"name": "203", "dtype": "float32"}, {"name": "204", "dtype": "float32"}, {"name": "205", "dtype": "float32"}, {"name": "206", "dtype": "float32"}, {"name": "207", "dtype": "float32"}, {"name": "208", "dtype": "float32"}, {"name": "209", "dtype": "float32"}, {"name": "210", "dtype": "float32"}, {"name": "211", "dtype": "float32"}, {"name": "212", "dtype": "float32"}, {"name": "213", "dtype": "float32"}, {"name": "214", "dtype": "float32"}, {"name": "215", "dtype": "float32"}, {"name": "216", "dtype": "float32"}, {"name": "217", "dtype": "float32"}, {"name": "218", "dtype": "float32"}, {"name": "219", "dtype": "float32"}, {"name": "220", "dtype": "float32"}, {"name": "221", "dtype": "float32"}, {"name": "222", "dtype": "float32"}, {"name": "223", "dtype": "float32"}, {"name": "224", "dtype": "float32"}, {"name": "225", "dtype": "float32"}, {"name": "226", "dtype": "float32"}, {"name": "227", "dtype": "float32"}, {"name": "228", "dtype": "float32"}, {"name": "229", "dtype": "float32"}, {"name": "230", "dtype": "float32"}, {"name": "231", "dtype": "float32"}, {"name": "232", "dtype": "float32"}, {"name": "233", "dtype": "float32"}, {"name": "234", "dtype": "float32"}, {"name": "235", "dtype": "float32"}, {"name": "236", "dtype": "float32"}, {"name": "237", "dtype": "float32"}, {"name": "238", "dtype": "float32"}, {"name": "239", "dtype": "float32"}, {"name": "240", "dtype": "float32"}, {"name": "241", "dtype": "float32"}, {"name": "242", "dtype": "float32"}, {"name": "243", "dtype": "float32"}, {"name": "244", "dtype": "float32"}, {"name": "245", "dtype": "float32"}, {"name": "246", "dtype": "float32"}, {"name": "247", "dtype": "float32"}, {"name": "248", "dtype": "float32"}, {"name": "249", "dtype": "float32"}, {"name": "250", "dtype": "float32"}, {"name": "251", "dtype": "float32"}, {"name": "252", "dtype": "float32"}, {"name": "253", "dtype": "float32"}, {"name": "254", "dtype": "float32"}, {"name": "255", "dtype": "float32"}, {"name": "256", "dtype": "float32"}, {"name": "257", "dtype": "float32"}, {"name": "258", "dtype": "float32"}, {"name": "259", "dtype": "float32"}, {"name": "260", "dtype": "float32"}, {"name": "261", "dtype": "float32"}, {"name": "262", "dtype": "float32"}, {"name": "263", "dtype": "float32"}, {"name": "264", "dtype": "float32"}, {"name": "265", "dtype": "float32"}, {"name": "266", "dtype": "float32"}, {"name": "267", "dtype": "float32"}, {"name": "268", "dtype": "float32"}, {"name": "269", "dtype": "float32"}, {"name": "270", "dtype": "float32"}, {"name": "271", "dtype": "float32"}, {"name": "272", "dtype": "float32"}, {"name": "273", "dtype": "float32"}, {"name": "274", "dtype": "float32"}, {"name": "275", "dtype": "float32"}, {"name": "276", "dtype": "float32"}, {"name": "277", "dtype": "float32"}, {"name": "278", "dtype": "float32"}, {"name": "279", "dtype": "float32"}, {"name": "280", "dtype": "float32"}, {"name": "281", "dtype": "float32"}, {"name": "282", "dtype": "float32"}, {"name": "283", "dtype": "float32"}, {"name": "284", "dtype": "float32"}, {"name": "285", "dtype": "float32"}, {"name": 
"286", "dtype": "float32"}, {"name": "287", "dtype": "float32"}, {"name": "288", "dtype": "float32"}, {"name": "289", "dtype": "float32"}, {"name": "290", "dtype": "float32"}, {"name": "291", "dtype": "float32"}, {"name": "292", "dtype": "float32"}, {"name": "293", "dtype": "float32"}, {"name": "294", "dtype": "float32"}, {"name": "295", "dtype": "float32"}, {"name": "296", "dtype": "float32"}, {"name": "297", "dtype": "float32"}, {"name": "298", "dtype": "float32"}, {"name": "299", "dtype": "float32"}, {"name": "300", "dtype": "float32"}, {"name": "301", "dtype": "float32"}, {"name": "302", "dtype": "float32"}, {"name": "303", "dtype": "float32"}, {"name": "304", "dtype": "float32"}, {"name": "305", "dtype": "float32"}, {"name": "306", "dtype": "float32"}, {"name": "307", "dtype": "float32"}, {"name": "308", "dtype": "float32"}, {"name": "309", "dtype": "float32"}, {"name": "310", "dtype": "float32"}, {"name": "311", "dtype": "float32"}, {"name": "312", "dtype": "float32"}, {"name": "313", "dtype": "float32"}, {"name": "314", "dtype": "float32"}, {"name": "315", "dtype": "float32"}, {"name": "316", "dtype": "float32"}, {"name": "317", "dtype": "float32"}, {"name": "318", "dtype": "float32"}, {"name": "319", "dtype": "float32"}, {"name": "320", "dtype": "float32"}, {"name": "321", "dtype": "float32"}, {"name": "322", "dtype": "float32"}, {"name": "323", "dtype": "float32"}, {"name": "324", "dtype": "float32"}, {"name": "325", "dtype": "float32"}, {"name": "326", "dtype": "float32"}, {"name": "327", "dtype": "float32"}, {"name": "328", "dtype": "float32"}, {"name": "329", "dtype": "float32"}, {"name": "330", "dtype": "float32"}, {"name": "331", "dtype": "float32"}, {"name": "332", "dtype": "float32"}, {"name": "333", "dtype": "float32"}, {"name": "334", "dtype": "float32"}, {"name": "335", "dtype": "float32"}, {"name": "336", "dtype": "float32"}, {"name": "337", "dtype": "float32"}, {"name": "338", "dtype": "float32"}, {"name": "339", "dtype": "float32"}, {"name": "340", "dtype": "float32"}, {"name": "341", "dtype": "float32"}, {"name": "342", "dtype": "float32"}, {"name": "343", "dtype": "float32"}, {"name": "344", "dtype": "float32"}, {"name": "345", "dtype": "float32"}, {"name": "346", "dtype": "float32"}, {"name": "347", "dtype": "float32"}, {"name": "348", "dtype": "float32"}, {"name": "349", "dtype": "float32"}, {"name": "350", "dtype": "float32"}, {"name": "351", "dtype": "float32"}, {"name": "352", "dtype": "float32"}, {"name": "353", "dtype": "float32"}, {"name": "354", "dtype": "float32"}, {"name": "355", "dtype": "float32"}, {"name": "356", "dtype": "float32"}, {"name": "357", "dtype": "float32"}, {"name": "358", "dtype": "float32"}, {"name": "359", "dtype": "float32"}, {"name": "360", "dtype": "float32"}, {"name": "361", "dtype": "float32"}, {"name": "362", "dtype": "float32"}, {"name": "363", "dtype": "float32"}, {"name": "364", "dtype": "float32"}, {"name": "365", "dtype": "float32"}, {"name": "366", "dtype": "float32"}, {"name": "367", "dtype": "float32"}, {"name": "368", "dtype": "float32"}, {"name": "369", "dtype": "float32"}, {"name": "370", "dtype": "float32"}, {"name": "371", "dtype": "float32"}, {"name": "372", "dtype": "float32"}, {"name": "373", "dtype": "float32"}, {"name": "374", "dtype": "float32"}, {"name": "375", "dtype": "float32"}, {"name": "376", "dtype": "float32"}, {"name": "377", "dtype": "float32"}, {"name": "378", "dtype": "float32"}, {"name": "379", "dtype": "float32"}, {"name": "380", "dtype": "float32"}, {"name": "381", "dtype": "float32"}, {"name": 
"382", "dtype": "float32"}, {"name": "383", "dtype": "float32"}, {"name": "384", "dtype": "float32"}, {"name": "385", "dtype": "float32"}, {"name": "386", "dtype": "float32"}, {"name": "387", "dtype": "float32"}, {"name": "388", "dtype": "float32"}, {"name": "389", "dtype": "float32"}, {"name": "390", "dtype": "float32"}, {"name": "391", "dtype": "float32"}, {"name": "392", "dtype": "float32"}, {"name": "393", "dtype": "float32"}, {"name": "394", "dtype": "float32"}, {"name": "395", "dtype": "float32"}, {"name": "396", "dtype": "float32"}, {"name": "397", "dtype": "float32"}, {"name": "398", "dtype": "float32"}, {"name": "399", "dtype": "float32"}, {"name": "400", "dtype": "float32"}, {"name": "401", "dtype": "float32"}, {"name": "402", "dtype": "float32"}, {"name": "403", "dtype": "float32"}, {"name": "404", "dtype": "float32"}, {"name": "405", "dtype": "float32"}, {"name": "406", "dtype": "float32"}, {"name": "407", "dtype": "float32"}, {"name": "408", "dtype": "float32"}, {"name": "409", "dtype": "float32"}, {"name": "410", "dtype": "float32"}, {"name": "411", "dtype": "float32"}, {"name": "412", "dtype": "float32"}, {"name": "413", "dtype": "float32"}, {"name": "414", "dtype": "float32"}, {"name": "415", "dtype": "float32"}, {"name": "416", "dtype": "float32"}, {"name": "417", "dtype": "float32"}, {"name": "418", "dtype": "float32"}, {"name": "419", "dtype": "float32"}, {"name": "420", "dtype": "float32"}, {"name": "421", "dtype": "float32"}, {"name": "422", "dtype": "float32"}, {"name": "423", "dtype": "float32"}, {"name": "424", "dtype": "float32"}, {"name": "425", "dtype": "float32"}, {"name": "426", "dtype": "float32"}, {"name": "427", "dtype": "float32"}, {"name": "428", "dtype": "float32"}, {"name": "429", "dtype": "float32"}, {"name": "430", "dtype": "float32"}, {"name": "431", "dtype": "float32"}, {"name": "432", "dtype": "float32"}, {"name": "433", "dtype": "float32"}, {"name": "434", "dtype": "float32"}, {"name": "435", "dtype": "float32"}, {"name": "436", "dtype": "float32"}, {"name": "437", "dtype": "float32"}, {"name": "438", "dtype": "float32"}, {"name": "439", "dtype": "float32"}, {"name": "440", "dtype": "float32"}, {"name": "441", "dtype": "float32"}, {"name": "442", "dtype": "float32"}, {"name": "443", "dtype": "float32"}, {"name": "444", "dtype": "float32"}, {"name": "445", "dtype": "float32"}, {"name": "446", "dtype": "float32"}, {"name": "447", "dtype": "float32"}, {"name": "448", "dtype": "float32"}, {"name": "449", "dtype": "float32"}, {"name": "450", "dtype": "float32"}, {"name": "451", "dtype": "float32"}, {"name": "452", "dtype": "float32"}, {"name": "453", "dtype": "float32"}, {"name": "454", "dtype": "float32"}, {"name": "455", "dtype": "float32"}, {"name": "456", "dtype": "float32"}, {"name": "457", "dtype": "float32"}, {"name": "458", "dtype": "float32"}, {"name": "459", "dtype": "float32"}, {"name": "460", "dtype": "float32"}, {"name": "461", "dtype": "float32"}, {"name": "462", "dtype": "float32"}, {"name": "463", "dtype": "float32"}, {"name": "464", "dtype": "float32"}, {"name": "465", "dtype": "float32"}, {"name": "466", "dtype": "float32"}, {"name": "467", "dtype": "float32"}, {"name": "468", "dtype": "float32"}, {"name": "469", "dtype": "float32"}, {"name": "470", "dtype": "float32"}, {"name": "471", "dtype": "float32"}, {"name": "472", "dtype": "float32"}, {"name": "473", "dtype": "float32"}, {"name": "474", "dtype": "float32"}, {"name": "475", "dtype": "float32"}, {"name": "476", "dtype": "float32"}, {"name": "477", "dtype": "float32"}, {"name": 
"478", "dtype": "float32"}, {"name": "479", "dtype": "float32"}, {"name": "480", "dtype": "float32"}, {"name": "481", "dtype": "float32"}, {"name": "482", "dtype": "float32"}, {"name": "483", "dtype": "float32"}, {"name": "484", "dtype": "float32"}, {"name": "485", "dtype": "float32"}, {"name": "486", "dtype": "float32"}, {"name": "487", "dtype": "float32"}, {"name": "488", "dtype": "float32"}, {"name": "489", "dtype": "float32"}, {"name": "490", "dtype": "float32"}, {"name": "491", "dtype": "float32"}, {"name": "492", "dtype": "float32"}, {"name": "493", "dtype": "float32"}, {"name": "494", "dtype": "float32"}, {"name": "495", "dtype": "float32"}, {"name": "496", "dtype": "float32"}, {"name": "497", "dtype": "float32"}, {"name": "498", "dtype": "float32"}, {"name": "499", "dtype": "float32"}, {"name": "500", "dtype": "float32"}, {"name": "501", "dtype": "float32"}, {"name": "502", "dtype": "float32"}, {"name": "503", "dtype": "float32"}, {"name": "504", "dtype": "float32"}, {"name": "505", "dtype": "float32"}, {"name": "506", "dtype": "float32"}, {"name": "507", "dtype": "float32"}, {"name": "508", "dtype": "float32"}, {"name": "509", "dtype": "float32"}, {"name": "510", "dtype": "float32"}, {"name": "511", "dtype": "float32"}, {"name": "512", "dtype": "float32"}, {"name": "513", "dtype": "float32"}, {"name": "514", "dtype": "float32"}, {"name": "515", "dtype": "float32"}, {"name": "516", "dtype": "float32"}, {"name": "517", "dtype": "float32"}, {"name": "518", "dtype": "float32"}, {"name": "519", "dtype": "float32"}, {"name": "520", "dtype": "float32"}, {"name": "521", "dtype": "float32"}, {"name": "522", "dtype": "float32"}, {"name": "523", "dtype": "float32"}, {"name": "524", "dtype": "float32"}, {"name": "525", "dtype": "float32"}, {"name": "526", "dtype": "float32"}, {"name": "527", "dtype": "float32"}, {"name": "528", "dtype": "float32"}, {"name": "529", "dtype": "float32"}, {"name": "530", "dtype": "float32"}, {"name": "531", "dtype": "float32"}, {"name": "532", "dtype": "float32"}, {"name": "533", "dtype": "float32"}, {"name": "534", "dtype": "float32"}, {"name": "535", "dtype": "float32"}, {"name": "536", "dtype": "float32"}, {"name": "537", "dtype": "float32"}, {"name": "538", "dtype": "float32"}, {"name": "539", "dtype": "float32"}, {"name": "540", "dtype": "float32"}, {"name": "541", "dtype": "float32"}, {"name": "542", "dtype": "float32"}, {"name": "543", "dtype": "float32"}, {"name": "544", "dtype": "float32"}, {"name": "545", "dtype": "float32"}, {"name": "546", "dtype": "float32"}, {"name": "547", "dtype": "float32"}, {"name": "548", "dtype": "float32"}, {"name": "549", "dtype": "float32"}, {"name": "550", "dtype": "float32"}, {"name": "551", "dtype": "float32"}, {"name": "552", "dtype": "float32"}, {"name": "553", "dtype": "float32"}, {"name": "554", "dtype": "float32"}, {"name": "555", "dtype": "float32"}, {"name": "556", "dtype": "float32"}, {"name": "557", "dtype": "float32"}, {"name": "558", "dtype": "float32"}, {"name": "559", "dtype": "float32"}, {"name": "560", "dtype": "float32"}, {"name": "561", "dtype": "float32"}, {"name": "562", "dtype": "float32"}, {"name": "563", "dtype": "float32"}, {"name": "564", "dtype": "float32"}, {"name": "565", "dtype": "float32"}, {"name": "566", "dtype": "float32"}, {"name": "567", "dtype": "float32"}, {"name": "568", "dtype": "float32"}, {"name": "569", "dtype": "float32"}, {"name": "570", "dtype": "float32"}, {"name": "571", "dtype": "float32"}, {"name": "572", "dtype": "float32"}, {"name": "573", "dtype": "float32"}, {"name": 
"574", "dtype": "float32"}, {"name": "575", "dtype": "float32"}, {"name": "576", "dtype": "float32"}, {"name": "577", "dtype": "float32"}, {"name": "578", "dtype": "float32"}, {"name": "579", "dtype": "float32"}, {"name": "580", "dtype": "float32"}, {"name": "581", "dtype": "float32"}, {"name": "582", "dtype": "float32"}, {"name": "583", "dtype": "float32"}, {"name": "584", "dtype": "float32"}, {"name": "585", "dtype": "float32"}, {"name": "586", "dtype": "float32"}, {"name": "587", "dtype": "float32"}, {"name": "588", "dtype": "float32"}, {"name": "589", "dtype": "float32"}, {"name": "590", "dtype": "float32"}, {"name": "591", "dtype": "float32"}, {"name": "592", "dtype": "float32"}, {"name": "593", "dtype": "float32"}, {"name": "594", "dtype": "float32"}, {"name": "595", "dtype": "float32"}, {"name": "596", "dtype": "float32"}, {"name": "597", "dtype": "float32"}, {"name": "598", "dtype": "float32"}, {"name": "599", "dtype": "float32"}, {"name": "600", "dtype": "float32"}, {"name": "601", "dtype": "float32"}, {"name": "602", "dtype": "float32"}, {"name": "603", "dtype": "float32"}, {"name": "604", "dtype": "float32"}, {"name": "605", "dtype": "float32"}, {"name": "606", "dtype": "float32"}, {"name": "607", "dtype": "float32"}, {"name": "608", "dtype": "float32"}, {"name": "609", "dtype": "float32"}, {"name": "610", "dtype": "float32"}, {"name": "611", "dtype": "float32"}, {"name": "612", "dtype": "float32"}, {"name": "613", "dtype": "float32"}, {"name": "614", "dtype": "float32"}, {"name": "615", "dtype": "float32"}, {"name": "616", "dtype": "float32"}, {"name": "617", "dtype": "float32"}, {"name": "618", "dtype": "float32"}, {"name": "619", "dtype": "float32"}, {"name": "620", "dtype": "float32"}, {"name": "621", "dtype": "float32"}, {"name": "622", "dtype": "float32"}, {"name": "623", "dtype": "float32"}, {"name": "624", "dtype": "float32"}, {"name": "625", "dtype": "float32"}, {"name": "626", "dtype": "float32"}, {"name": "627", "dtype": "float32"}, {"name": "628", "dtype": "float32"}, {"name": "629", "dtype": "float32"}, {"name": "630", "dtype": "float32"}, {"name": "631", "dtype": "float32"}, {"name": "632", "dtype": "float32"}, {"name": "633", "dtype": "float32"}, {"name": "634", "dtype": "float32"}, {"name": "635", "dtype": "float32"}, {"name": "636", "dtype": "float32"}, {"name": "637", "dtype": "float32"}, {"name": "638", "dtype": "float32"}, {"name": "639", "dtype": "float32"}, {"name": "640", "dtype": "float32"}, {"name": "641", "dtype": "float32"}, {"name": "642", "dtype": "float32"}, {"name": "643", "dtype": "float32"}, {"name": "644", "dtype": "float32"}, {"name": "645", "dtype": "float32"}, {"name": "646", "dtype": "float32"}, {"name": "647", "dtype": "float32"}, {"name": "648", "dtype": "float32"}, {"name": "649", "dtype": "float32"}, {"name": "650", "dtype": "float32"}, {"name": "651", "dtype": "float32"}, {"name": "652", "dtype": "float32"}, {"name": "653", "dtype": "float32"}, {"name": "654", "dtype": "float32"}, {"name": "655", "dtype": "float32"}, {"name": "656", "dtype": "float32"}, {"name": "657", "dtype": "float32"}, {"name": "658", "dtype": "float32"}, {"name": "659", "dtype": "float32"}, {"name": "660", "dtype": "float32"}, {"name": "661", "dtype": "float32"}, {"name": "662", "dtype": "float32"}, {"name": "663", "dtype": "float32"}, {"name": "664", "dtype": "float32"}, {"name": "665", "dtype": "float32"}, {"name": "666", "dtype": "float32"}, {"name": "667", "dtype": "float32"}, {"name": "668", "dtype": "float32"}, {"name": "669", "dtype": "float32"}, {"name": 
"670", "dtype": "float32"}, {"name": "671", "dtype": "float32"}, {"name": "672", "dtype": "float32"}, {"name": "673", "dtype": "float32"}, {"name": "674", "dtype": "float32"}, {"name": "675", "dtype": "float32"}, {"name": "676", "dtype": "float32"}, {"name": "677", "dtype": "float32"}, {"name": "678", "dtype": "float32"}, {"name": "679", "dtype": "float32"}, {"name": "680", "dtype": "float32"}, {"name": "681", "dtype": "float32"}, {"name": "682", "dtype": "float32"}, {"name": "683", "dtype": "float32"}, {"name": "684", "dtype": "float32"}, {"name": "685", "dtype": "float32"}, {"name": "686", "dtype": "float32"}, {"name": "687", "dtype": "float32"}, {"name": "688", "dtype": "float32"}, {"name": "689", "dtype": "float32"}, {"name": "690", "dtype": "float32"}, {"name": "691", "dtype": "float32"}, {"name": "692", "dtype": "float32"}, {"name": "693", "dtype": "float32"}, {"name": "694", "dtype": "float32"}, {"name": "695", "dtype": "float32"}, {"name": "696", "dtype": "float32"}, {"name": "697", "dtype": "float32"}, {"name": "698", "dtype": "float32"}, {"name": "699", "dtype": "float32"}, {"name": "700", "dtype": "float32"}, {"name": "701", "dtype": "float32"}, {"name": "702", "dtype": "float32"}, {"name": "703", "dtype": "float32"}, {"name": "704", "dtype": "float32"}, {"name": "705", "dtype": "float32"}, {"name": "706", "dtype": "float32"}, {"name": "707", "dtype": "float32"}, {"name": "708", "dtype": "float32"}, {"name": "709", "dtype": "float32"}, {"name": "710", "dtype": "float32"}, {"name": "711", "dtype": "float32"}, {"name": "712", "dtype": "float32"}, {"name": "713", "dtype": "float32"}, {"name": "714", "dtype": "float32"}, {"name": "715", "dtype": "float32"}, {"name": "716", "dtype": "float32"}, {"name": "717", "dtype": "float32"}, {"name": "718", "dtype": "float32"}, {"name": "719", "dtype": "float32"}, {"name": "720", "dtype": "float32"}, {"name": "721", "dtype": "float32"}, {"name": "722", "dtype": "float32"}, {"name": "723", "dtype": "float32"}, {"name": "724", "dtype": "float32"}, {"name": "725", "dtype": "float32"}, {"name": "726", "dtype": "float32"}, {"name": "727", "dtype": "float32"}, {"name": "728", "dtype": "float32"}, {"name": "729", "dtype": "float32"}, {"name": "730", "dtype": "float32"}, {"name": "731", "dtype": "float32"}, {"name": "732", "dtype": "float32"}, {"name": "733", "dtype": "float32"}, {"name": "734", "dtype": "float32"}, {"name": "735", "dtype": "float32"}, {"name": "736", "dtype": "float32"}, {"name": "737", "dtype": "float32"}, {"name": "738", "dtype": "float32"}, {"name": "739", "dtype": "float32"}, {"name": "740", "dtype": "float32"}, {"name": "741", "dtype": "float32"}, {"name": "742", "dtype": "float32"}, {"name": "743", "dtype": "float32"}, {"name": "744", "dtype": "float32"}, {"name": "745", "dtype": "float32"}, {"name": "746", "dtype": "float32"}, {"name": "747", "dtype": "float32"}, {"name": "748", "dtype": "float32"}, {"name": "749", "dtype": "float32"}, {"name": "750", "dtype": "float32"}, {"name": "751", "dtype": "float32"}, {"name": "752", "dtype": "float32"}, {"name": "753", "dtype": "float32"}, {"name": "754", "dtype": "float32"}, {"name": "755", "dtype": "float32"}, {"name": "756", "dtype": "float32"}, {"name": "757", "dtype": "float32"}, {"name": "758", "dtype": "float32"}, {"name": "759", "dtype": "float32"}, {"name": "760", "dtype": "float32"}, {"name": "761", "dtype": "float32"}, {"name": "762", "dtype": "float32"}, {"name": "763", "dtype": "float32"}, {"name": "764", "dtype": "float32"}, {"name": "765", "dtype": "float32"}, {"name": 
"766", "dtype": "float32"}, {"name": "767", "dtype": "float32"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 115650065.625, "num_examples": 37500}, {"name": "test", "num_bytes": 38550020.0, "num_examples": 12500}], "download_size": 211782412, "dataset_size": 154200085.625}}
2023-08-18T16:41:00+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Spirit_GPT2_Baseline" More Information needed
[ "# Dataset Card for \"Spirit_GPT2_Baseline\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Spirit_GPT2_Baseline\"\n\nMore Information needed" ]
[ 6, 20 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"Spirit_GPT2_Baseline\"\n\nMore Information needed" ]
eb61b33445c9df8d6a887dc11392d5fd754997f3
# Dataset Card for "Spirit_GPTNEO_Baseline" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
EgilKarlsen/Spirit_GPTNEO_Baseline
[ "region:us" ]
2023-08-18T16:50:54+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "0", "dtype": "float32"}, {"name": "1", "dtype": "float32"}, {"name": "2", "dtype": "float32"}, {"name": "3", "dtype": "float32"}, {"name": "4", "dtype": "float32"}, {"name": "5", "dtype": "float32"}, {"name": "6", "dtype": "float32"}, {"name": "7", "dtype": "float32"}, {"name": "8", "dtype": "float32"}, {"name": "9", "dtype": "float32"}, {"name": "10", "dtype": "float32"}, {"name": "11", "dtype": "float32"}, {"name": "12", "dtype": "float32"}, {"name": "13", "dtype": "float32"}, {"name": "14", "dtype": "float32"}, {"name": "15", "dtype": "float32"}, {"name": "16", "dtype": "float32"}, {"name": "17", "dtype": "float32"}, {"name": "18", "dtype": "float32"}, {"name": "19", "dtype": "float32"}, {"name": "20", "dtype": "float32"}, {"name": "21", "dtype": "float32"}, {"name": "22", "dtype": "float32"}, {"name": "23", "dtype": "float32"}, {"name": "24", "dtype": "float32"}, {"name": "25", "dtype": "float32"}, {"name": "26", "dtype": "float32"}, {"name": "27", "dtype": "float32"}, {"name": "28", "dtype": "float32"}, {"name": "29", "dtype": "float32"}, {"name": "30", "dtype": "float32"}, {"name": "31", "dtype": "float32"}, {"name": "32", "dtype": "float32"}, {"name": "33", "dtype": "float32"}, {"name": "34", "dtype": "float32"}, {"name": "35", "dtype": "float32"}, {"name": "36", "dtype": "float32"}, {"name": "37", "dtype": "float32"}, {"name": "38", "dtype": "float32"}, {"name": "39", "dtype": "float32"}, {"name": "40", "dtype": "float32"}, {"name": "41", "dtype": "float32"}, {"name": "42", "dtype": "float32"}, {"name": "43", "dtype": "float32"}, {"name": "44", "dtype": "float32"}, {"name": "45", "dtype": "float32"}, {"name": "46", "dtype": "float32"}, {"name": "47", "dtype": "float32"}, {"name": "48", "dtype": "float32"}, {"name": "49", "dtype": "float32"}, {"name": "50", "dtype": "float32"}, {"name": "51", "dtype": "float32"}, {"name": "52", "dtype": "float32"}, {"name": "53", "dtype": "float32"}, {"name": "54", "dtype": "float32"}, {"name": "55", "dtype": "float32"}, {"name": "56", "dtype": "float32"}, {"name": "57", "dtype": "float32"}, {"name": "58", "dtype": "float32"}, {"name": "59", "dtype": "float32"}, {"name": "60", "dtype": "float32"}, {"name": "61", "dtype": "float32"}, {"name": "62", "dtype": "float32"}, {"name": "63", "dtype": "float32"}, {"name": "64", "dtype": "float32"}, {"name": "65", "dtype": "float32"}, {"name": "66", "dtype": "float32"}, {"name": "67", "dtype": "float32"}, {"name": "68", "dtype": "float32"}, {"name": "69", "dtype": "float32"}, {"name": "70", "dtype": "float32"}, {"name": "71", "dtype": "float32"}, {"name": "72", "dtype": "float32"}, {"name": "73", "dtype": "float32"}, {"name": "74", "dtype": "float32"}, {"name": "75", "dtype": "float32"}, {"name": "76", "dtype": "float32"}, {"name": "77", "dtype": "float32"}, {"name": "78", "dtype": "float32"}, {"name": "79", "dtype": "float32"}, {"name": "80", "dtype": "float32"}, {"name": "81", "dtype": "float32"}, {"name": "82", "dtype": "float32"}, {"name": "83", "dtype": "float32"}, {"name": "84", "dtype": "float32"}, {"name": "85", "dtype": "float32"}, {"name": "86", "dtype": "float32"}, {"name": "87", "dtype": "float32"}, {"name": "88", "dtype": "float32"}, {"name": "89", "dtype": "float32"}, {"name": "90", "dtype": "float32"}, {"name": "91", "dtype": "float32"}, {"name": "92", "dtype": "float32"}, {"name": "93", "dtype": "float32"}, 
{"name": "94", "dtype": "float32"}, {"name": "95", "dtype": "float32"}, {"name": "96", "dtype": "float32"}, {"name": "97", "dtype": "float32"}, {"name": "98", "dtype": "float32"}, {"name": "99", "dtype": "float32"}, {"name": "100", "dtype": "float32"}, {"name": "101", "dtype": "float32"}, {"name": "102", "dtype": "float32"}, {"name": "103", "dtype": "float32"}, {"name": "104", "dtype": "float32"}, {"name": "105", "dtype": "float32"}, {"name": "106", "dtype": "float32"}, {"name": "107", "dtype": "float32"}, {"name": "108", "dtype": "float32"}, {"name": "109", "dtype": "float32"}, {"name": "110", "dtype": "float32"}, {"name": "111", "dtype": "float32"}, {"name": "112", "dtype": "float32"}, {"name": "113", "dtype": "float32"}, {"name": "114", "dtype": "float32"}, {"name": "115", "dtype": "float32"}, {"name": "116", "dtype": "float32"}, {"name": "117", "dtype": "float32"}, {"name": "118", "dtype": "float32"}, {"name": "119", "dtype": "float32"}, {"name": "120", "dtype": "float32"}, {"name": "121", "dtype": "float32"}, {"name": "122", "dtype": "float32"}, {"name": "123", "dtype": "float32"}, {"name": "124", "dtype": "float32"}, {"name": "125", "dtype": "float32"}, {"name": "126", "dtype": "float32"}, {"name": "127", "dtype": "float32"}, {"name": "128", "dtype": "float32"}, {"name": "129", "dtype": "float32"}, {"name": "130", "dtype": "float32"}, {"name": "131", "dtype": "float32"}, {"name": "132", "dtype": "float32"}, {"name": "133", "dtype": "float32"}, {"name": "134", "dtype": "float32"}, {"name": "135", "dtype": "float32"}, {"name": "136", "dtype": "float32"}, {"name": "137", "dtype": "float32"}, {"name": "138", "dtype": "float32"}, {"name": "139", "dtype": "float32"}, {"name": "140", "dtype": "float32"}, {"name": "141", "dtype": "float32"}, {"name": "142", "dtype": "float32"}, {"name": "143", "dtype": "float32"}, {"name": "144", "dtype": "float32"}, {"name": "145", "dtype": "float32"}, {"name": "146", "dtype": "float32"}, {"name": "147", "dtype": "float32"}, {"name": "148", "dtype": "float32"}, {"name": "149", "dtype": "float32"}, {"name": "150", "dtype": "float32"}, {"name": "151", "dtype": "float32"}, {"name": "152", "dtype": "float32"}, {"name": "153", "dtype": "float32"}, {"name": "154", "dtype": "float32"}, {"name": "155", "dtype": "float32"}, {"name": "156", "dtype": "float32"}, {"name": "157", "dtype": "float32"}, {"name": "158", "dtype": "float32"}, {"name": "159", "dtype": "float32"}, {"name": "160", "dtype": "float32"}, {"name": "161", "dtype": "float32"}, {"name": "162", "dtype": "float32"}, {"name": "163", "dtype": "float32"}, {"name": "164", "dtype": "float32"}, {"name": "165", "dtype": "float32"}, {"name": "166", "dtype": "float32"}, {"name": "167", "dtype": "float32"}, {"name": "168", "dtype": "float32"}, {"name": "169", "dtype": "float32"}, {"name": "170", "dtype": "float32"}, {"name": "171", "dtype": "float32"}, {"name": "172", "dtype": "float32"}, {"name": "173", "dtype": "float32"}, {"name": "174", "dtype": "float32"}, {"name": "175", "dtype": "float32"}, {"name": "176", "dtype": "float32"}, {"name": "177", "dtype": "float32"}, {"name": "178", "dtype": "float32"}, {"name": "179", "dtype": "float32"}, {"name": "180", "dtype": "float32"}, {"name": "181", "dtype": "float32"}, {"name": "182", "dtype": "float32"}, {"name": "183", "dtype": "float32"}, {"name": "184", "dtype": "float32"}, {"name": "185", "dtype": "float32"}, {"name": "186", "dtype": "float32"}, {"name": "187", "dtype": "float32"}, {"name": "188", "dtype": "float32"}, {"name": "189", "dtype": "float32"}, {"name": 
"190", "dtype": "float32"}, {"name": "191", "dtype": "float32"}, {"name": "192", "dtype": "float32"}, {"name": "193", "dtype": "float32"}, {"name": "194", "dtype": "float32"}, {"name": "195", "dtype": "float32"}, {"name": "196", "dtype": "float32"}, {"name": "197", "dtype": "float32"}, {"name": "198", "dtype": "float32"}, {"name": "199", "dtype": "float32"}, {"name": "200", "dtype": "float32"}, {"name": "201", "dtype": "float32"}, {"name": "202", "dtype": "float32"}, {"name": "203", "dtype": "float32"}, {"name": "204", "dtype": "float32"}, {"name": "205", "dtype": "float32"}, {"name": "206", "dtype": "float32"}, {"name": "207", "dtype": "float32"}, {"name": "208", "dtype": "float32"}, {"name": "209", "dtype": "float32"}, {"name": "210", "dtype": "float32"}, {"name": "211", "dtype": "float32"}, {"name": "212", "dtype": "float32"}, {"name": "213", "dtype": "float32"}, {"name": "214", "dtype": "float32"}, {"name": "215", "dtype": "float32"}, {"name": "216", "dtype": "float32"}, {"name": "217", "dtype": "float32"}, {"name": "218", "dtype": "float32"}, {"name": "219", "dtype": "float32"}, {"name": "220", "dtype": "float32"}, {"name": "221", "dtype": "float32"}, {"name": "222", "dtype": "float32"}, {"name": "223", "dtype": "float32"}, {"name": "224", "dtype": "float32"}, {"name": "225", "dtype": "float32"}, {"name": "226", "dtype": "float32"}, {"name": "227", "dtype": "float32"}, {"name": "228", "dtype": "float32"}, {"name": "229", "dtype": "float32"}, {"name": "230", "dtype": "float32"}, {"name": "231", "dtype": "float32"}, {"name": "232", "dtype": "float32"}, {"name": "233", "dtype": "float32"}, {"name": "234", "dtype": "float32"}, {"name": "235", "dtype": "float32"}, {"name": "236", "dtype": "float32"}, {"name": "237", "dtype": "float32"}, {"name": "238", "dtype": "float32"}, {"name": "239", "dtype": "float32"}, {"name": "240", "dtype": "float32"}, {"name": "241", "dtype": "float32"}, {"name": "242", "dtype": "float32"}, {"name": "243", "dtype": "float32"}, {"name": "244", "dtype": "float32"}, {"name": "245", "dtype": "float32"}, {"name": "246", "dtype": "float32"}, {"name": "247", "dtype": "float32"}, {"name": "248", "dtype": "float32"}, {"name": "249", "dtype": "float32"}, {"name": "250", "dtype": "float32"}, {"name": "251", "dtype": "float32"}, {"name": "252", "dtype": "float32"}, {"name": "253", "dtype": "float32"}, {"name": "254", "dtype": "float32"}, {"name": "255", "dtype": "float32"}, {"name": "256", "dtype": "float32"}, {"name": "257", "dtype": "float32"}, {"name": "258", "dtype": "float32"}, {"name": "259", "dtype": "float32"}, {"name": "260", "dtype": "float32"}, {"name": "261", "dtype": "float32"}, {"name": "262", "dtype": "float32"}, {"name": "263", "dtype": "float32"}, {"name": "264", "dtype": "float32"}, {"name": "265", "dtype": "float32"}, {"name": "266", "dtype": "float32"}, {"name": "267", "dtype": "float32"}, {"name": "268", "dtype": "float32"}, {"name": "269", "dtype": "float32"}, {"name": "270", "dtype": "float32"}, {"name": "271", "dtype": "float32"}, {"name": "272", "dtype": "float32"}, {"name": "273", "dtype": "float32"}, {"name": "274", "dtype": "float32"}, {"name": "275", "dtype": "float32"}, {"name": "276", "dtype": "float32"}, {"name": "277", "dtype": "float32"}, {"name": "278", "dtype": "float32"}, {"name": "279", "dtype": "float32"}, {"name": "280", "dtype": "float32"}, {"name": "281", "dtype": "float32"}, {"name": "282", "dtype": "float32"}, {"name": "283", "dtype": "float32"}, {"name": "284", "dtype": "float32"}, {"name": "285", "dtype": "float32"}, {"name": 
"286", "dtype": "float32"}, {"name": "287", "dtype": "float32"}, {"name": "288", "dtype": "float32"}, {"name": "289", "dtype": "float32"}, {"name": "290", "dtype": "float32"}, {"name": "291", "dtype": "float32"}, {"name": "292", "dtype": "float32"}, {"name": "293", "dtype": "float32"}, {"name": "294", "dtype": "float32"}, {"name": "295", "dtype": "float32"}, {"name": "296", "dtype": "float32"}, {"name": "297", "dtype": "float32"}, {"name": "298", "dtype": "float32"}, {"name": "299", "dtype": "float32"}, {"name": "300", "dtype": "float32"}, {"name": "301", "dtype": "float32"}, {"name": "302", "dtype": "float32"}, {"name": "303", "dtype": "float32"}, {"name": "304", "dtype": "float32"}, {"name": "305", "dtype": "float32"}, {"name": "306", "dtype": "float32"}, {"name": "307", "dtype": "float32"}, {"name": "308", "dtype": "float32"}, {"name": "309", "dtype": "float32"}, {"name": "310", "dtype": "float32"}, {"name": "311", "dtype": "float32"}, {"name": "312", "dtype": "float32"}, {"name": "313", "dtype": "float32"}, {"name": "314", "dtype": "float32"}, {"name": "315", "dtype": "float32"}, {"name": "316", "dtype": "float32"}, {"name": "317", "dtype": "float32"}, {"name": "318", "dtype": "float32"}, {"name": "319", "dtype": "float32"}, {"name": "320", "dtype": "float32"}, {"name": "321", "dtype": "float32"}, {"name": "322", "dtype": "float32"}, {"name": "323", "dtype": "float32"}, {"name": "324", "dtype": "float32"}, {"name": "325", "dtype": "float32"}, {"name": "326", "dtype": "float32"}, {"name": "327", "dtype": "float32"}, {"name": "328", "dtype": "float32"}, {"name": "329", "dtype": "float32"}, {"name": "330", "dtype": "float32"}, {"name": "331", "dtype": "float32"}, {"name": "332", "dtype": "float32"}, {"name": "333", "dtype": "float32"}, {"name": "334", "dtype": "float32"}, {"name": "335", "dtype": "float32"}, {"name": "336", "dtype": "float32"}, {"name": "337", "dtype": "float32"}, {"name": "338", "dtype": "float32"}, {"name": "339", "dtype": "float32"}, {"name": "340", "dtype": "float32"}, {"name": "341", "dtype": "float32"}, {"name": "342", "dtype": "float32"}, {"name": "343", "dtype": "float32"}, {"name": "344", "dtype": "float32"}, {"name": "345", "dtype": "float32"}, {"name": "346", "dtype": "float32"}, {"name": "347", "dtype": "float32"}, {"name": "348", "dtype": "float32"}, {"name": "349", "dtype": "float32"}, {"name": "350", "dtype": "float32"}, {"name": "351", "dtype": "float32"}, {"name": "352", "dtype": "float32"}, {"name": "353", "dtype": "float32"}, {"name": "354", "dtype": "float32"}, {"name": "355", "dtype": "float32"}, {"name": "356", "dtype": "float32"}, {"name": "357", "dtype": "float32"}, {"name": "358", "dtype": "float32"}, {"name": "359", "dtype": "float32"}, {"name": "360", "dtype": "float32"}, {"name": "361", "dtype": "float32"}, {"name": "362", "dtype": "float32"}, {"name": "363", "dtype": "float32"}, {"name": "364", "dtype": "float32"}, {"name": "365", "dtype": "float32"}, {"name": "366", "dtype": "float32"}, {"name": "367", "dtype": "float32"}, {"name": "368", "dtype": "float32"}, {"name": "369", "dtype": "float32"}, {"name": "370", "dtype": "float32"}, {"name": "371", "dtype": "float32"}, {"name": "372", "dtype": "float32"}, {"name": "373", "dtype": "float32"}, {"name": "374", "dtype": "float32"}, {"name": "375", "dtype": "float32"}, {"name": "376", "dtype": "float32"}, {"name": "377", "dtype": "float32"}, {"name": "378", "dtype": "float32"}, {"name": "379", "dtype": "float32"}, {"name": "380", "dtype": "float32"}, {"name": "381", "dtype": "float32"}, {"name": 
"382", "dtype": "float32"}, {"name": "383", "dtype": "float32"}, {"name": "384", "dtype": "float32"}, {"name": "385", "dtype": "float32"}, {"name": "386", "dtype": "float32"}, {"name": "387", "dtype": "float32"}, {"name": "388", "dtype": "float32"}, {"name": "389", "dtype": "float32"}, {"name": "390", "dtype": "float32"}, {"name": "391", "dtype": "float32"}, {"name": "392", "dtype": "float32"}, {"name": "393", "dtype": "float32"}, {"name": "394", "dtype": "float32"}, {"name": "395", "dtype": "float32"}, {"name": "396", "dtype": "float32"}, {"name": "397", "dtype": "float32"}, {"name": "398", "dtype": "float32"}, {"name": "399", "dtype": "float32"}, {"name": "400", "dtype": "float32"}, {"name": "401", "dtype": "float32"}, {"name": "402", "dtype": "float32"}, {"name": "403", "dtype": "float32"}, {"name": "404", "dtype": "float32"}, {"name": "405", "dtype": "float32"}, {"name": "406", "dtype": "float32"}, {"name": "407", "dtype": "float32"}, {"name": "408", "dtype": "float32"}, {"name": "409", "dtype": "float32"}, {"name": "410", "dtype": "float32"}, {"name": "411", "dtype": "float32"}, {"name": "412", "dtype": "float32"}, {"name": "413", "dtype": "float32"}, {"name": "414", "dtype": "float32"}, {"name": "415", "dtype": "float32"}, {"name": "416", "dtype": "float32"}, {"name": "417", "dtype": "float32"}, {"name": "418", "dtype": "float32"}, {"name": "419", "dtype": "float32"}, {"name": "420", "dtype": "float32"}, {"name": "421", "dtype": "float32"}, {"name": "422", "dtype": "float32"}, {"name": "423", "dtype": "float32"}, {"name": "424", "dtype": "float32"}, {"name": "425", "dtype": "float32"}, {"name": "426", "dtype": "float32"}, {"name": "427", "dtype": "float32"}, {"name": "428", "dtype": "float32"}, {"name": "429", "dtype": "float32"}, {"name": "430", "dtype": "float32"}, {"name": "431", "dtype": "float32"}, {"name": "432", "dtype": "float32"}, {"name": "433", "dtype": "float32"}, {"name": "434", "dtype": "float32"}, {"name": "435", "dtype": "float32"}, {"name": "436", "dtype": "float32"}, {"name": "437", "dtype": "float32"}, {"name": "438", "dtype": "float32"}, {"name": "439", "dtype": "float32"}, {"name": "440", "dtype": "float32"}, {"name": "441", "dtype": "float32"}, {"name": "442", "dtype": "float32"}, {"name": "443", "dtype": "float32"}, {"name": "444", "dtype": "float32"}, {"name": "445", "dtype": "float32"}, {"name": "446", "dtype": "float32"}, {"name": "447", "dtype": "float32"}, {"name": "448", "dtype": "float32"}, {"name": "449", "dtype": "float32"}, {"name": "450", "dtype": "float32"}, {"name": "451", "dtype": "float32"}, {"name": "452", "dtype": "float32"}, {"name": "453", "dtype": "float32"}, {"name": "454", "dtype": "float32"}, {"name": "455", "dtype": "float32"}, {"name": "456", "dtype": "float32"}, {"name": "457", "dtype": "float32"}, {"name": "458", "dtype": "float32"}, {"name": "459", "dtype": "float32"}, {"name": "460", "dtype": "float32"}, {"name": "461", "dtype": "float32"}, {"name": "462", "dtype": "float32"}, {"name": "463", "dtype": "float32"}, {"name": "464", "dtype": "float32"}, {"name": "465", "dtype": "float32"}, {"name": "466", "dtype": "float32"}, {"name": "467", "dtype": "float32"}, {"name": "468", "dtype": "float32"}, {"name": "469", "dtype": "float32"}, {"name": "470", "dtype": "float32"}, {"name": "471", "dtype": "float32"}, {"name": "472", "dtype": "float32"}, {"name": "473", "dtype": "float32"}, {"name": "474", "dtype": "float32"}, {"name": "475", "dtype": "float32"}, {"name": "476", "dtype": "float32"}, {"name": "477", "dtype": "float32"}, {"name": 
"478", "dtype": "float32"}, {"name": "479", "dtype": "float32"}, {"name": "480", "dtype": "float32"}, {"name": "481", "dtype": "float32"}, {"name": "482", "dtype": "float32"}, {"name": "483", "dtype": "float32"}, {"name": "484", "dtype": "float32"}, {"name": "485", "dtype": "float32"}, {"name": "486", "dtype": "float32"}, {"name": "487", "dtype": "float32"}, {"name": "488", "dtype": "float32"}, {"name": "489", "dtype": "float32"}, {"name": "490", "dtype": "float32"}, {"name": "491", "dtype": "float32"}, {"name": "492", "dtype": "float32"}, {"name": "493", "dtype": "float32"}, {"name": "494", "dtype": "float32"}, {"name": "495", "dtype": "float32"}, {"name": "496", "dtype": "float32"}, {"name": "497", "dtype": "float32"}, {"name": "498", "dtype": "float32"}, {"name": "499", "dtype": "float32"}, {"name": "500", "dtype": "float32"}, {"name": "501", "dtype": "float32"}, {"name": "502", "dtype": "float32"}, {"name": "503", "dtype": "float32"}, {"name": "504", "dtype": "float32"}, {"name": "505", "dtype": "float32"}, {"name": "506", "dtype": "float32"}, {"name": "507", "dtype": "float32"}, {"name": "508", "dtype": "float32"}, {"name": "509", "dtype": "float32"}, {"name": "510", "dtype": "float32"}, {"name": "511", "dtype": "float32"}, {"name": "512", "dtype": "float32"}, {"name": "513", "dtype": "float32"}, {"name": "514", "dtype": "float32"}, {"name": "515", "dtype": "float32"}, {"name": "516", "dtype": "float32"}, {"name": "517", "dtype": "float32"}, {"name": "518", "dtype": "float32"}, {"name": "519", "dtype": "float32"}, {"name": "520", "dtype": "float32"}, {"name": "521", "dtype": "float32"}, {"name": "522", "dtype": "float32"}, {"name": "523", "dtype": "float32"}, {"name": "524", "dtype": "float32"}, {"name": "525", "dtype": "float32"}, {"name": "526", "dtype": "float32"}, {"name": "527", "dtype": "float32"}, {"name": "528", "dtype": "float32"}, {"name": "529", "dtype": "float32"}, {"name": "530", "dtype": "float32"}, {"name": "531", "dtype": "float32"}, {"name": "532", "dtype": "float32"}, {"name": "533", "dtype": "float32"}, {"name": "534", "dtype": "float32"}, {"name": "535", "dtype": "float32"}, {"name": "536", "dtype": "float32"}, {"name": "537", "dtype": "float32"}, {"name": "538", "dtype": "float32"}, {"name": "539", "dtype": "float32"}, {"name": "540", "dtype": "float32"}, {"name": "541", "dtype": "float32"}, {"name": "542", "dtype": "float32"}, {"name": "543", "dtype": "float32"}, {"name": "544", "dtype": "float32"}, {"name": "545", "dtype": "float32"}, {"name": "546", "dtype": "float32"}, {"name": "547", "dtype": "float32"}, {"name": "548", "dtype": "float32"}, {"name": "549", "dtype": "float32"}, {"name": "550", "dtype": "float32"}, {"name": "551", "dtype": "float32"}, {"name": "552", "dtype": "float32"}, {"name": "553", "dtype": "float32"}, {"name": "554", "dtype": "float32"}, {"name": "555", "dtype": "float32"}, {"name": "556", "dtype": "float32"}, {"name": "557", "dtype": "float32"}, {"name": "558", "dtype": "float32"}, {"name": "559", "dtype": "float32"}, {"name": "560", "dtype": "float32"}, {"name": "561", "dtype": "float32"}, {"name": "562", "dtype": "float32"}, {"name": "563", "dtype": "float32"}, {"name": "564", "dtype": "float32"}, {"name": "565", "dtype": "float32"}, {"name": "566", "dtype": "float32"}, {"name": "567", "dtype": "float32"}, {"name": "568", "dtype": "float32"}, {"name": "569", "dtype": "float32"}, {"name": "570", "dtype": "float32"}, {"name": "571", "dtype": "float32"}, {"name": "572", "dtype": "float32"}, {"name": "573", "dtype": "float32"}, {"name": 
"574", "dtype": "float32"}, {"name": "575", "dtype": "float32"}, {"name": "576", "dtype": "float32"}, {"name": "577", "dtype": "float32"}, {"name": "578", "dtype": "float32"}, {"name": "579", "dtype": "float32"}, {"name": "580", "dtype": "float32"}, {"name": "581", "dtype": "float32"}, {"name": "582", "dtype": "float32"}, {"name": "583", "dtype": "float32"}, {"name": "584", "dtype": "float32"}, {"name": "585", "dtype": "float32"}, {"name": "586", "dtype": "float32"}, {"name": "587", "dtype": "float32"}, {"name": "588", "dtype": "float32"}, {"name": "589", "dtype": "float32"}, {"name": "590", "dtype": "float32"}, {"name": "591", "dtype": "float32"}, {"name": "592", "dtype": "float32"}, {"name": "593", "dtype": "float32"}, {"name": "594", "dtype": "float32"}, {"name": "595", "dtype": "float32"}, {"name": "596", "dtype": "float32"}, {"name": "597", "dtype": "float32"}, {"name": "598", "dtype": "float32"}, {"name": "599", "dtype": "float32"}, {"name": "600", "dtype": "float32"}, {"name": "601", "dtype": "float32"}, {"name": "602", "dtype": "float32"}, {"name": "603", "dtype": "float32"}, {"name": "604", "dtype": "float32"}, {"name": "605", "dtype": "float32"}, {"name": "606", "dtype": "float32"}, {"name": "607", "dtype": "float32"}, {"name": "608", "dtype": "float32"}, {"name": "609", "dtype": "float32"}, {"name": "610", "dtype": "float32"}, {"name": "611", "dtype": "float32"}, {"name": "612", "dtype": "float32"}, {"name": "613", "dtype": "float32"}, {"name": "614", "dtype": "float32"}, {"name": "615", "dtype": "float32"}, {"name": "616", "dtype": "float32"}, {"name": "617", "dtype": "float32"}, {"name": "618", "dtype": "float32"}, {"name": "619", "dtype": "float32"}, {"name": "620", "dtype": "float32"}, {"name": "621", "dtype": "float32"}, {"name": "622", "dtype": "float32"}, {"name": "623", "dtype": "float32"}, {"name": "624", "dtype": "float32"}, {"name": "625", "dtype": "float32"}, {"name": "626", "dtype": "float32"}, {"name": "627", "dtype": "float32"}, {"name": "628", "dtype": "float32"}, {"name": "629", "dtype": "float32"}, {"name": "630", "dtype": "float32"}, {"name": "631", "dtype": "float32"}, {"name": "632", "dtype": "float32"}, {"name": "633", "dtype": "float32"}, {"name": "634", "dtype": "float32"}, {"name": "635", "dtype": "float32"}, {"name": "636", "dtype": "float32"}, {"name": "637", "dtype": "float32"}, {"name": "638", "dtype": "float32"}, {"name": "639", "dtype": "float32"}, {"name": "640", "dtype": "float32"}, {"name": "641", "dtype": "float32"}, {"name": "642", "dtype": "float32"}, {"name": "643", "dtype": "float32"}, {"name": "644", "dtype": "float32"}, {"name": "645", "dtype": "float32"}, {"name": "646", "dtype": "float32"}, {"name": "647", "dtype": "float32"}, {"name": "648", "dtype": "float32"}, {"name": "649", "dtype": "float32"}, {"name": "650", "dtype": "float32"}, {"name": "651", "dtype": "float32"}, {"name": "652", "dtype": "float32"}, {"name": "653", "dtype": "float32"}, {"name": "654", "dtype": "float32"}, {"name": "655", "dtype": "float32"}, {"name": "656", "dtype": "float32"}, {"name": "657", "dtype": "float32"}, {"name": "658", "dtype": "float32"}, {"name": "659", "dtype": "float32"}, {"name": "660", "dtype": "float32"}, {"name": "661", "dtype": "float32"}, {"name": "662", "dtype": "float32"}, {"name": "663", "dtype": "float32"}, {"name": "664", "dtype": "float32"}, {"name": "665", "dtype": "float32"}, {"name": "666", "dtype": "float32"}, {"name": "667", "dtype": "float32"}, {"name": "668", "dtype": "float32"}, {"name": "669", "dtype": "float32"}, {"name": 
"670", "dtype": "float32"}, {"name": "671", "dtype": "float32"}, {"name": "672", "dtype": "float32"}, {"name": "673", "dtype": "float32"}, {"name": "674", "dtype": "float32"}, {"name": "675", "dtype": "float32"}, {"name": "676", "dtype": "float32"}, {"name": "677", "dtype": "float32"}, {"name": "678", "dtype": "float32"}, {"name": "679", "dtype": "float32"}, {"name": "680", "dtype": "float32"}, {"name": "681", "dtype": "float32"}, {"name": "682", "dtype": "float32"}, {"name": "683", "dtype": "float32"}, {"name": "684", "dtype": "float32"}, {"name": "685", "dtype": "float32"}, {"name": "686", "dtype": "float32"}, {"name": "687", "dtype": "float32"}, {"name": "688", "dtype": "float32"}, {"name": "689", "dtype": "float32"}, {"name": "690", "dtype": "float32"}, {"name": "691", "dtype": "float32"}, {"name": "692", "dtype": "float32"}, {"name": "693", "dtype": "float32"}, {"name": "694", "dtype": "float32"}, {"name": "695", "dtype": "float32"}, {"name": "696", "dtype": "float32"}, {"name": "697", "dtype": "float32"}, {"name": "698", "dtype": "float32"}, {"name": "699", "dtype": "float32"}, {"name": "700", "dtype": "float32"}, {"name": "701", "dtype": "float32"}, {"name": "702", "dtype": "float32"}, {"name": "703", "dtype": "float32"}, {"name": "704", "dtype": "float32"}, {"name": "705", "dtype": "float32"}, {"name": "706", "dtype": "float32"}, {"name": "707", "dtype": "float32"}, {"name": "708", "dtype": "float32"}, {"name": "709", "dtype": "float32"}, {"name": "710", "dtype": "float32"}, {"name": "711", "dtype": "float32"}, {"name": "712", "dtype": "float32"}, {"name": "713", "dtype": "float32"}, {"name": "714", "dtype": "float32"}, {"name": "715", "dtype": "float32"}, {"name": "716", "dtype": "float32"}, {"name": "717", "dtype": "float32"}, {"name": "718", "dtype": "float32"}, {"name": "719", "dtype": "float32"}, {"name": "720", "dtype": "float32"}, {"name": "721", "dtype": "float32"}, {"name": "722", "dtype": "float32"}, {"name": "723", "dtype": "float32"}, {"name": "724", "dtype": "float32"}, {"name": "725", "dtype": "float32"}, {"name": "726", "dtype": "float32"}, {"name": "727", "dtype": "float32"}, {"name": "728", "dtype": "float32"}, {"name": "729", "dtype": "float32"}, {"name": "730", "dtype": "float32"}, {"name": "731", "dtype": "float32"}, {"name": "732", "dtype": "float32"}, {"name": "733", "dtype": "float32"}, {"name": "734", "dtype": "float32"}, {"name": "735", "dtype": "float32"}, {"name": "736", "dtype": "float32"}, {"name": "737", "dtype": "float32"}, {"name": "738", "dtype": "float32"}, {"name": "739", "dtype": "float32"}, {"name": "740", "dtype": "float32"}, {"name": "741", "dtype": "float32"}, {"name": "742", "dtype": "float32"}, {"name": "743", "dtype": "float32"}, {"name": "744", "dtype": "float32"}, {"name": "745", "dtype": "float32"}, {"name": "746", "dtype": "float32"}, {"name": "747", "dtype": "float32"}, {"name": "748", "dtype": "float32"}, {"name": "749", "dtype": "float32"}, {"name": "750", "dtype": "float32"}, {"name": "751", "dtype": "float32"}, {"name": "752", "dtype": "float32"}, {"name": "753", "dtype": "float32"}, {"name": "754", "dtype": "float32"}, {"name": "755", "dtype": "float32"}, {"name": "756", "dtype": "float32"}, {"name": "757", "dtype": "float32"}, {"name": "758", "dtype": "float32"}, {"name": "759", "dtype": "float32"}, {"name": "760", "dtype": "float32"}, {"name": "761", "dtype": "float32"}, {"name": "762", "dtype": "float32"}, {"name": "763", "dtype": "float32"}, {"name": "764", "dtype": "float32"}, {"name": "765", "dtype": "float32"}, {"name": 
"766", "dtype": "float32"}, {"name": "767", "dtype": "float32"}, {"name": "768", "dtype": "float32"}, {"name": "769", "dtype": "float32"}, {"name": "770", "dtype": "float32"}, {"name": "771", "dtype": "float32"}, {"name": "772", "dtype": "float32"}, {"name": "773", "dtype": "float32"}, {"name": "774", "dtype": "float32"}, {"name": "775", "dtype": "float32"}, {"name": "776", "dtype": "float32"}, {"name": "777", "dtype": "float32"}, {"name": "778", "dtype": "float32"}, {"name": "779", "dtype": "float32"}, {"name": "780", "dtype": "float32"}, {"name": "781", "dtype": "float32"}, {"name": "782", "dtype": "float32"}, {"name": "783", "dtype": "float32"}, {"name": "784", "dtype": "float32"}, {"name": "785", "dtype": "float32"}, {"name": "786", "dtype": "float32"}, {"name": "787", "dtype": "float32"}, {"name": "788", "dtype": "float32"}, {"name": "789", "dtype": "float32"}, {"name": "790", "dtype": "float32"}, {"name": "791", "dtype": "float32"}, {"name": "792", "dtype": "float32"}, {"name": "793", "dtype": "float32"}, {"name": "794", "dtype": "float32"}, {"name": "795", "dtype": "float32"}, {"name": "796", "dtype": "float32"}, {"name": "797", "dtype": "float32"}, {"name": "798", "dtype": "float32"}, {"name": "799", "dtype": "float32"}, {"name": "800", "dtype": "float32"}, {"name": "801", "dtype": "float32"}, {"name": "802", "dtype": "float32"}, {"name": "803", "dtype": "float32"}, {"name": "804", "dtype": "float32"}, {"name": "805", "dtype": "float32"}, {"name": "806", "dtype": "float32"}, {"name": "807", "dtype": "float32"}, {"name": "808", "dtype": "float32"}, {"name": "809", "dtype": "float32"}, {"name": "810", "dtype": "float32"}, {"name": "811", "dtype": "float32"}, {"name": "812", "dtype": "float32"}, {"name": "813", "dtype": "float32"}, {"name": "814", "dtype": "float32"}, {"name": "815", "dtype": "float32"}, {"name": "816", "dtype": "float32"}, {"name": "817", "dtype": "float32"}, {"name": "818", "dtype": "float32"}, {"name": "819", "dtype": "float32"}, {"name": "820", "dtype": "float32"}, {"name": "821", "dtype": "float32"}, {"name": "822", "dtype": "float32"}, {"name": "823", "dtype": "float32"}, {"name": "824", "dtype": "float32"}, {"name": "825", "dtype": "float32"}, {"name": "826", "dtype": "float32"}, {"name": "827", "dtype": "float32"}, {"name": "828", "dtype": "float32"}, {"name": "829", "dtype": "float32"}, {"name": "830", "dtype": "float32"}, {"name": "831", "dtype": "float32"}, {"name": "832", "dtype": "float32"}, {"name": "833", "dtype": "float32"}, {"name": "834", "dtype": "float32"}, {"name": "835", "dtype": "float32"}, {"name": "836", "dtype": "float32"}, {"name": "837", "dtype": "float32"}, {"name": "838", "dtype": "float32"}, {"name": "839", "dtype": "float32"}, {"name": "840", "dtype": "float32"}, {"name": "841", "dtype": "float32"}, {"name": "842", "dtype": "float32"}, {"name": "843", "dtype": "float32"}, {"name": "844", "dtype": "float32"}, {"name": "845", "dtype": "float32"}, {"name": "846", "dtype": "float32"}, {"name": "847", "dtype": "float32"}, {"name": "848", "dtype": "float32"}, {"name": "849", "dtype": "float32"}, {"name": "850", "dtype": "float32"}, {"name": "851", "dtype": "float32"}, {"name": "852", "dtype": "float32"}, {"name": "853", "dtype": "float32"}, {"name": "854", "dtype": "float32"}, {"name": "855", "dtype": "float32"}, {"name": "856", "dtype": "float32"}, {"name": "857", "dtype": "float32"}, {"name": "858", "dtype": "float32"}, {"name": "859", "dtype": "float32"}, {"name": "860", "dtype": "float32"}, {"name": "861", "dtype": "float32"}, {"name": 
"862", "dtype": "float32"}, {"name": "863", "dtype": "float32"}, {"name": "864", "dtype": "float32"}, {"name": "865", "dtype": "float32"}, {"name": "866", "dtype": "float32"}, {"name": "867", "dtype": "float32"}, {"name": "868", "dtype": "float32"}, {"name": "869", "dtype": "float32"}, {"name": "870", "dtype": "float32"}, {"name": "871", "dtype": "float32"}, {"name": "872", "dtype": "float32"}, {"name": "873", "dtype": "float32"}, {"name": "874", "dtype": "float32"}, {"name": "875", "dtype": "float32"}, {"name": "876", "dtype": "float32"}, {"name": "877", "dtype": "float32"}, {"name": "878", "dtype": "float32"}, {"name": "879", "dtype": "float32"}, {"name": "880", "dtype": "float32"}, {"name": "881", "dtype": "float32"}, {"name": "882", "dtype": "float32"}, {"name": "883", "dtype": "float32"}, {"name": "884", "dtype": "float32"}, {"name": "885", "dtype": "float32"}, {"name": "886", "dtype": "float32"}, {"name": "887", "dtype": "float32"}, {"name": "888", "dtype": "float32"}, {"name": "889", "dtype": "float32"}, {"name": "890", "dtype": "float32"}, {"name": "891", "dtype": "float32"}, {"name": "892", "dtype": "float32"}, {"name": "893", "dtype": "float32"}, {"name": "894", "dtype": "float32"}, {"name": "895", "dtype": "float32"}, {"name": "896", "dtype": "float32"}, {"name": "897", "dtype": "float32"}, {"name": "898", "dtype": "float32"}, {"name": "899", "dtype": "float32"}, {"name": "900", "dtype": "float32"}, {"name": "901", "dtype": "float32"}, {"name": "902", "dtype": "float32"}, {"name": "903", "dtype": "float32"}, {"name": "904", "dtype": "float32"}, {"name": "905", "dtype": "float32"}, {"name": "906", "dtype": "float32"}, {"name": "907", "dtype": "float32"}, {"name": "908", "dtype": "float32"}, {"name": "909", "dtype": "float32"}, {"name": "910", "dtype": "float32"}, {"name": "911", "dtype": "float32"}, {"name": "912", "dtype": "float32"}, {"name": "913", "dtype": "float32"}, {"name": "914", "dtype": "float32"}, {"name": "915", "dtype": "float32"}, {"name": "916", "dtype": "float32"}, {"name": "917", "dtype": "float32"}, {"name": "918", "dtype": "float32"}, {"name": "919", "dtype": "float32"}, {"name": "920", "dtype": "float32"}, {"name": "921", "dtype": "float32"}, {"name": "922", "dtype": "float32"}, {"name": "923", "dtype": "float32"}, {"name": "924", "dtype": "float32"}, {"name": "925", "dtype": "float32"}, {"name": "926", "dtype": "float32"}, {"name": "927", "dtype": "float32"}, {"name": "928", "dtype": "float32"}, {"name": "929", "dtype": "float32"}, {"name": "930", "dtype": "float32"}, {"name": "931", "dtype": "float32"}, {"name": "932", "dtype": "float32"}, {"name": "933", "dtype": "float32"}, {"name": "934", "dtype": "float32"}, {"name": "935", "dtype": "float32"}, {"name": "936", "dtype": "float32"}, {"name": "937", "dtype": "float32"}, {"name": "938", "dtype": "float32"}, {"name": "939", "dtype": "float32"}, {"name": "940", "dtype": "float32"}, {"name": "941", "dtype": "float32"}, {"name": "942", "dtype": "float32"}, {"name": "943", "dtype": "float32"}, {"name": "944", "dtype": "float32"}, {"name": "945", "dtype": "float32"}, {"name": "946", "dtype": "float32"}, {"name": "947", "dtype": "float32"}, {"name": "948", "dtype": "float32"}, {"name": "949", "dtype": "float32"}, {"name": "950", "dtype": "float32"}, {"name": "951", "dtype": "float32"}, {"name": "952", "dtype": "float32"}, {"name": "953", "dtype": "float32"}, {"name": "954", "dtype": "float32"}, {"name": "955", "dtype": "float32"}, {"name": "956", "dtype": "float32"}, {"name": "957", "dtype": "float32"}, {"name": 
"958", "dtype": "float32"}, {"name": "959", "dtype": "float32"}, {"name": "960", "dtype": "float32"}, {"name": "961", "dtype": "float32"}, {"name": "962", "dtype": "float32"}, {"name": "963", "dtype": "float32"}, {"name": "964", "dtype": "float32"}, {"name": "965", "dtype": "float32"}, {"name": "966", "dtype": "float32"}, {"name": "967", "dtype": "float32"}, {"name": "968", "dtype": "float32"}, {"name": "969", "dtype": "float32"}, {"name": "970", "dtype": "float32"}, {"name": "971", "dtype": "float32"}, {"name": "972", "dtype": "float32"}, {"name": "973", "dtype": "float32"}, {"name": "974", "dtype": "float32"}, {"name": "975", "dtype": "float32"}, {"name": "976", "dtype": "float32"}, {"name": "977", "dtype": "float32"}, {"name": "978", "dtype": "float32"}, {"name": "979", "dtype": "float32"}, {"name": "980", "dtype": "float32"}, {"name": "981", "dtype": "float32"}, {"name": "982", "dtype": "float32"}, {"name": "983", "dtype": "float32"}, {"name": "984", "dtype": "float32"}, {"name": "985", "dtype": "float32"}, {"name": "986", "dtype": "float32"}, {"name": "987", "dtype": "float32"}, {"name": "988", "dtype": "float32"}, {"name": "989", "dtype": "float32"}, {"name": "990", "dtype": "float32"}, {"name": "991", "dtype": "float32"}, {"name": "992", "dtype": "float32"}, {"name": "993", "dtype": "float32"}, {"name": "994", "dtype": "float32"}, {"name": "995", "dtype": "float32"}, {"name": "996", "dtype": "float32"}, {"name": "997", "dtype": "float32"}, {"name": "998", "dtype": "float32"}, {"name": "999", "dtype": "float32"}, {"name": "1000", "dtype": "float32"}, {"name": "1001", "dtype": "float32"}, {"name": "1002", "dtype": "float32"}, {"name": "1003", "dtype": "float32"}, {"name": "1004", "dtype": "float32"}, {"name": "1005", "dtype": "float32"}, {"name": "1006", "dtype": "float32"}, {"name": "1007", "dtype": "float32"}, {"name": "1008", "dtype": "float32"}, {"name": "1009", "dtype": "float32"}, {"name": "1010", "dtype": "float32"}, {"name": "1011", "dtype": "float32"}, {"name": "1012", "dtype": "float32"}, {"name": "1013", "dtype": "float32"}, {"name": "1014", "dtype": "float32"}, {"name": "1015", "dtype": "float32"}, {"name": "1016", "dtype": "float32"}, {"name": "1017", "dtype": "float32"}, {"name": "1018", "dtype": "float32"}, {"name": "1019", "dtype": "float32"}, {"name": "1020", "dtype": "float32"}, {"name": "1021", "dtype": "float32"}, {"name": "1022", "dtype": "float32"}, {"name": "1023", "dtype": "float32"}, {"name": "1024", "dtype": "float32"}, {"name": "1025", "dtype": "float32"}, {"name": "1026", "dtype": "float32"}, {"name": "1027", "dtype": "float32"}, {"name": "1028", "dtype": "float32"}, {"name": "1029", "dtype": "float32"}, {"name": "1030", "dtype": "float32"}, {"name": "1031", "dtype": "float32"}, {"name": "1032", "dtype": "float32"}, {"name": "1033", "dtype": "float32"}, {"name": "1034", "dtype": "float32"}, {"name": "1035", "dtype": "float32"}, {"name": "1036", "dtype": "float32"}, {"name": "1037", "dtype": "float32"}, {"name": "1038", "dtype": "float32"}, {"name": "1039", "dtype": "float32"}, {"name": "1040", "dtype": "float32"}, {"name": "1041", "dtype": "float32"}, {"name": "1042", "dtype": "float32"}, {"name": "1043", "dtype": "float32"}, {"name": "1044", "dtype": "float32"}, {"name": "1045", "dtype": "float32"}, {"name": "1046", "dtype": "float32"}, {"name": "1047", "dtype": "float32"}, {"name": "1048", "dtype": "float32"}, {"name": "1049", "dtype": "float32"}, {"name": "1050", "dtype": "float32"}, {"name": "1051", "dtype": "float32"}, {"name": "1052", "dtype": 
"float32"}, {"name": "1053", "dtype": "float32"}, {"name": "1054", "dtype": "float32"}, {"name": "1055", "dtype": "float32"}, {"name": "1056", "dtype": "float32"}, {"name": "1057", "dtype": "float32"}, {"name": "1058", "dtype": "float32"}, {"name": "1059", "dtype": "float32"}, {"name": "1060", "dtype": "float32"}, {"name": "1061", "dtype": "float32"}, {"name": "1062", "dtype": "float32"}, {"name": "1063", "dtype": "float32"}, {"name": "1064", "dtype": "float32"}, {"name": "1065", "dtype": "float32"}, {"name": "1066", "dtype": "float32"}, {"name": "1067", "dtype": "float32"}, {"name": "1068", "dtype": "float32"}, {"name": "1069", "dtype": "float32"}, {"name": "1070", "dtype": "float32"}, {"name": "1071", "dtype": "float32"}, {"name": "1072", "dtype": "float32"}, {"name": "1073", "dtype": "float32"}, {"name": "1074", "dtype": "float32"}, {"name": "1075", "dtype": "float32"}, {"name": "1076", "dtype": "float32"}, {"name": "1077", "dtype": "float32"}, {"name": "1078", "dtype": "float32"}, {"name": "1079", "dtype": "float32"}, {"name": "1080", "dtype": "float32"}, {"name": "1081", "dtype": "float32"}, {"name": "1082", "dtype": "float32"}, {"name": "1083", "dtype": "float32"}, {"name": "1084", "dtype": "float32"}, {"name": "1085", "dtype": "float32"}, {"name": "1086", "dtype": "float32"}, {"name": "1087", "dtype": "float32"}, {"name": "1088", "dtype": "float32"}, {"name": "1089", "dtype": "float32"}, {"name": "1090", "dtype": "float32"}, {"name": "1091", "dtype": "float32"}, {"name": "1092", "dtype": "float32"}, {"name": "1093", "dtype": "float32"}, {"name": "1094", "dtype": "float32"}, {"name": "1095", "dtype": "float32"}, {"name": "1096", "dtype": "float32"}, {"name": "1097", "dtype": "float32"}, {"name": "1098", "dtype": "float32"}, {"name": "1099", "dtype": "float32"}, {"name": "1100", "dtype": "float32"}, {"name": "1101", "dtype": "float32"}, {"name": "1102", "dtype": "float32"}, {"name": "1103", "dtype": "float32"}, {"name": "1104", "dtype": "float32"}, {"name": "1105", "dtype": "float32"}, {"name": "1106", "dtype": "float32"}, {"name": "1107", "dtype": "float32"}, {"name": "1108", "dtype": "float32"}, {"name": "1109", "dtype": "float32"}, {"name": "1110", "dtype": "float32"}, {"name": "1111", "dtype": "float32"}, {"name": "1112", "dtype": "float32"}, {"name": "1113", "dtype": "float32"}, {"name": "1114", "dtype": "float32"}, {"name": "1115", "dtype": "float32"}, {"name": "1116", "dtype": "float32"}, {"name": "1117", "dtype": "float32"}, {"name": "1118", "dtype": "float32"}, {"name": "1119", "dtype": "float32"}, {"name": "1120", "dtype": "float32"}, {"name": "1121", "dtype": "float32"}, {"name": "1122", "dtype": "float32"}, {"name": "1123", "dtype": "float32"}, {"name": "1124", "dtype": "float32"}, {"name": "1125", "dtype": "float32"}, {"name": "1126", "dtype": "float32"}, {"name": "1127", "dtype": "float32"}, {"name": "1128", "dtype": "float32"}, {"name": "1129", "dtype": "float32"}, {"name": "1130", "dtype": "float32"}, {"name": "1131", "dtype": "float32"}, {"name": "1132", "dtype": "float32"}, {"name": "1133", "dtype": "float32"}, {"name": "1134", "dtype": "float32"}, {"name": "1135", "dtype": "float32"}, {"name": "1136", "dtype": "float32"}, {"name": "1137", "dtype": "float32"}, {"name": "1138", "dtype": "float32"}, {"name": "1139", "dtype": "float32"}, {"name": "1140", "dtype": "float32"}, {"name": "1141", "dtype": "float32"}, {"name": "1142", "dtype": "float32"}, {"name": "1143", "dtype": "float32"}, {"name": "1144", "dtype": "float32"}, {"name": "1145", "dtype": "float32"}, {"name": 
"1146", "dtype": "float32"}, {"name": "1147", "dtype": "float32"}, {"name": "1148", "dtype": "float32"}, {"name": "1149", "dtype": "float32"}, {"name": "1150", "dtype": "float32"}, {"name": "1151", "dtype": "float32"}, {"name": "1152", "dtype": "float32"}, {"name": "1153", "dtype": "float32"}, {"name": "1154", "dtype": "float32"}, {"name": "1155", "dtype": "float32"}, {"name": "1156", "dtype": "float32"}, {"name": "1157", "dtype": "float32"}, {"name": "1158", "dtype": "float32"}, {"name": "1159", "dtype": "float32"}, {"name": "1160", "dtype": "float32"}, {"name": "1161", "dtype": "float32"}, {"name": "1162", "dtype": "float32"}, {"name": "1163", "dtype": "float32"}, {"name": "1164", "dtype": "float32"}, {"name": "1165", "dtype": "float32"}, {"name": "1166", "dtype": "float32"}, {"name": "1167", "dtype": "float32"}, {"name": "1168", "dtype": "float32"}, {"name": "1169", "dtype": "float32"}, {"name": "1170", "dtype": "float32"}, {"name": "1171", "dtype": "float32"}, {"name": "1172", "dtype": "float32"}, {"name": "1173", "dtype": "float32"}, {"name": "1174", "dtype": "float32"}, {"name": "1175", "dtype": "float32"}, {"name": "1176", "dtype": "float32"}, {"name": "1177", "dtype": "float32"}, {"name": "1178", "dtype": "float32"}, {"name": "1179", "dtype": "float32"}, {"name": "1180", "dtype": "float32"}, {"name": "1181", "dtype": "float32"}, {"name": "1182", "dtype": "float32"}, {"name": "1183", "dtype": "float32"}, {"name": "1184", "dtype": "float32"}, {"name": "1185", "dtype": "float32"}, {"name": "1186", "dtype": "float32"}, {"name": "1187", "dtype": "float32"}, {"name": "1188", "dtype": "float32"}, {"name": "1189", "dtype": "float32"}, {"name": "1190", "dtype": "float32"}, {"name": "1191", "dtype": "float32"}, {"name": "1192", "dtype": "float32"}, {"name": "1193", "dtype": "float32"}, {"name": "1194", "dtype": "float32"}, {"name": "1195", "dtype": "float32"}, {"name": "1196", "dtype": "float32"}, {"name": "1197", "dtype": "float32"}, {"name": "1198", "dtype": "float32"}, {"name": "1199", "dtype": "float32"}, {"name": "1200", "dtype": "float32"}, {"name": "1201", "dtype": "float32"}, {"name": "1202", "dtype": "float32"}, {"name": "1203", "dtype": "float32"}, {"name": "1204", "dtype": "float32"}, {"name": "1205", "dtype": "float32"}, {"name": "1206", "dtype": "float32"}, {"name": "1207", "dtype": "float32"}, {"name": "1208", "dtype": "float32"}, {"name": "1209", "dtype": "float32"}, {"name": "1210", "dtype": "float32"}, {"name": "1211", "dtype": "float32"}, {"name": "1212", "dtype": "float32"}, {"name": "1213", "dtype": "float32"}, {"name": "1214", "dtype": "float32"}, {"name": "1215", "dtype": "float32"}, {"name": "1216", "dtype": "float32"}, {"name": "1217", "dtype": "float32"}, {"name": "1218", "dtype": "float32"}, {"name": "1219", "dtype": "float32"}, {"name": "1220", "dtype": "float32"}, {"name": "1221", "dtype": "float32"}, {"name": "1222", "dtype": "float32"}, {"name": "1223", "dtype": "float32"}, {"name": "1224", "dtype": "float32"}, {"name": "1225", "dtype": "float32"}, {"name": "1226", "dtype": "float32"}, {"name": "1227", "dtype": "float32"}, {"name": "1228", "dtype": "float32"}, {"name": "1229", "dtype": "float32"}, {"name": "1230", "dtype": "float32"}, {"name": "1231", "dtype": "float32"}, {"name": "1232", "dtype": "float32"}, {"name": "1233", "dtype": "float32"}, {"name": "1234", "dtype": "float32"}, {"name": "1235", "dtype": "float32"}, {"name": "1236", "dtype": "float32"}, {"name": "1237", "dtype": "float32"}, {"name": "1238", "dtype": "float32"}, {"name": "1239", "dtype": 
"float32"}, {"name": "1240", "dtype": "float32"}, {"name": "1241", "dtype": "float32"}, {"name": "1242", "dtype": "float32"}, {"name": "1243", "dtype": "float32"}, {"name": "1244", "dtype": "float32"}, {"name": "1245", "dtype": "float32"}, {"name": "1246", "dtype": "float32"}, {"name": "1247", "dtype": "float32"}, {"name": "1248", "dtype": "float32"}, {"name": "1249", "dtype": "float32"}, {"name": "1250", "dtype": "float32"}, {"name": "1251", "dtype": "float32"}, {"name": "1252", "dtype": "float32"}, {"name": "1253", "dtype": "float32"}, {"name": "1254", "dtype": "float32"}, {"name": "1255", "dtype": "float32"}, {"name": "1256", "dtype": "float32"}, {"name": "1257", "dtype": "float32"}, {"name": "1258", "dtype": "float32"}, {"name": "1259", "dtype": "float32"}, {"name": "1260", "dtype": "float32"}, {"name": "1261", "dtype": "float32"}, {"name": "1262", "dtype": "float32"}, {"name": "1263", "dtype": "float32"}, {"name": "1264", "dtype": "float32"}, {"name": "1265", "dtype": "float32"}, {"name": "1266", "dtype": "float32"}, {"name": "1267", "dtype": "float32"}, {"name": "1268", "dtype": "float32"}, {"name": "1269", "dtype": "float32"}, {"name": "1270", "dtype": "float32"}, {"name": "1271", "dtype": "float32"}, {"name": "1272", "dtype": "float32"}, {"name": "1273", "dtype": "float32"}, {"name": "1274", "dtype": "float32"}, {"name": "1275", "dtype": "float32"}, {"name": "1276", "dtype": "float32"}, {"name": "1277", "dtype": "float32"}, {"name": "1278", "dtype": "float32"}, {"name": "1279", "dtype": "float32"}, {"name": "1280", "dtype": "float32"}, {"name": "1281", "dtype": "float32"}, {"name": "1282", "dtype": "float32"}, {"name": "1283", "dtype": "float32"}, {"name": "1284", "dtype": "float32"}, {"name": "1285", "dtype": "float32"}, {"name": "1286", "dtype": "float32"}, {"name": "1287", "dtype": "float32"}, {"name": "1288", "dtype": "float32"}, {"name": "1289", "dtype": "float32"}, {"name": "1290", "dtype": "float32"}, {"name": "1291", "dtype": "float32"}, {"name": "1292", "dtype": "float32"}, {"name": "1293", "dtype": "float32"}, {"name": "1294", "dtype": "float32"}, {"name": "1295", "dtype": "float32"}, {"name": "1296", "dtype": "float32"}, {"name": "1297", "dtype": "float32"}, {"name": "1298", "dtype": "float32"}, {"name": "1299", "dtype": "float32"}, {"name": "1300", "dtype": "float32"}, {"name": "1301", "dtype": "float32"}, {"name": "1302", "dtype": "float32"}, {"name": "1303", "dtype": "float32"}, {"name": "1304", "dtype": "float32"}, {"name": "1305", "dtype": "float32"}, {"name": "1306", "dtype": "float32"}, {"name": "1307", "dtype": "float32"}, {"name": "1308", "dtype": "float32"}, {"name": "1309", "dtype": "float32"}, {"name": "1310", "dtype": "float32"}, {"name": "1311", "dtype": "float32"}, {"name": "1312", "dtype": "float32"}, {"name": "1313", "dtype": "float32"}, {"name": "1314", "dtype": "float32"}, {"name": "1315", "dtype": "float32"}, {"name": "1316", "dtype": "float32"}, {"name": "1317", "dtype": "float32"}, {"name": "1318", "dtype": "float32"}, {"name": "1319", "dtype": "float32"}, {"name": "1320", "dtype": "float32"}, {"name": "1321", "dtype": "float32"}, {"name": "1322", "dtype": "float32"}, {"name": "1323", "dtype": "float32"}, {"name": "1324", "dtype": "float32"}, {"name": "1325", "dtype": "float32"}, {"name": "1326", "dtype": "float32"}, {"name": "1327", "dtype": "float32"}, {"name": "1328", "dtype": "float32"}, {"name": "1329", "dtype": "float32"}, {"name": "1330", "dtype": "float32"}, {"name": "1331", "dtype": "float32"}, {"name": "1332", "dtype": "float32"}, {"name": 
"1333", "dtype": "float32"}, {"name": "1334", "dtype": "float32"}, {"name": "1335", "dtype": "float32"}, {"name": "1336", "dtype": "float32"}, {"name": "1337", "dtype": "float32"}, {"name": "1338", "dtype": "float32"}, {"name": "1339", "dtype": "float32"}, {"name": "1340", "dtype": "float32"}, {"name": "1341", "dtype": "float32"}, {"name": "1342", "dtype": "float32"}, {"name": "1343", "dtype": "float32"}, {"name": "1344", "dtype": "float32"}, {"name": "1345", "dtype": "float32"}, {"name": "1346", "dtype": "float32"}, {"name": "1347", "dtype": "float32"}, {"name": "1348", "dtype": "float32"}, {"name": "1349", "dtype": "float32"}, {"name": "1350", "dtype": "float32"}, {"name": "1351", "dtype": "float32"}, {"name": "1352", "dtype": "float32"}, {"name": "1353", "dtype": "float32"}, {"name": "1354", "dtype": "float32"}, {"name": "1355", "dtype": "float32"}, {"name": "1356", "dtype": "float32"}, {"name": "1357", "dtype": "float32"}, {"name": "1358", "dtype": "float32"}, {"name": "1359", "dtype": "float32"}, {"name": "1360", "dtype": "float32"}, {"name": "1361", "dtype": "float32"}, {"name": "1362", "dtype": "float32"}, {"name": "1363", "dtype": "float32"}, {"name": "1364", "dtype": "float32"}, {"name": "1365", "dtype": "float32"}, {"name": "1366", "dtype": "float32"}, {"name": "1367", "dtype": "float32"}, {"name": "1368", "dtype": "float32"}, {"name": "1369", "dtype": "float32"}, {"name": "1370", "dtype": "float32"}, {"name": "1371", "dtype": "float32"}, {"name": "1372", "dtype": "float32"}, {"name": "1373", "dtype": "float32"}, {"name": "1374", "dtype": "float32"}, {"name": "1375", "dtype": "float32"}, {"name": "1376", "dtype": "float32"}, {"name": "1377", "dtype": "float32"}, {"name": "1378", "dtype": "float32"}, {"name": "1379", "dtype": "float32"}, {"name": "1380", "dtype": "float32"}, {"name": "1381", "dtype": "float32"}, {"name": "1382", "dtype": "float32"}, {"name": "1383", "dtype": "float32"}, {"name": "1384", "dtype": "float32"}, {"name": "1385", "dtype": "float32"}, {"name": "1386", "dtype": "float32"}, {"name": "1387", "dtype": "float32"}, {"name": "1388", "dtype": "float32"}, {"name": "1389", "dtype": "float32"}, {"name": "1390", "dtype": "float32"}, {"name": "1391", "dtype": "float32"}, {"name": "1392", "dtype": "float32"}, {"name": "1393", "dtype": "float32"}, {"name": "1394", "dtype": "float32"}, {"name": "1395", "dtype": "float32"}, {"name": "1396", "dtype": "float32"}, {"name": "1397", "dtype": "float32"}, {"name": "1398", "dtype": "float32"}, {"name": "1399", "dtype": "float32"}, {"name": "1400", "dtype": "float32"}, {"name": "1401", "dtype": "float32"}, {"name": "1402", "dtype": "float32"}, {"name": "1403", "dtype": "float32"}, {"name": "1404", "dtype": "float32"}, {"name": "1405", "dtype": "float32"}, {"name": "1406", "dtype": "float32"}, {"name": "1407", "dtype": "float32"}, {"name": "1408", "dtype": "float32"}, {"name": "1409", "dtype": "float32"}, {"name": "1410", "dtype": "float32"}, {"name": "1411", "dtype": "float32"}, {"name": "1412", "dtype": "float32"}, {"name": "1413", "dtype": "float32"}, {"name": "1414", "dtype": "float32"}, {"name": "1415", "dtype": "float32"}, {"name": "1416", "dtype": "float32"}, {"name": "1417", "dtype": "float32"}, {"name": "1418", "dtype": "float32"}, {"name": "1419", "dtype": "float32"}, {"name": "1420", "dtype": "float32"}, {"name": "1421", "dtype": "float32"}, {"name": "1422", "dtype": "float32"}, {"name": "1423", "dtype": "float32"}, {"name": "1424", "dtype": "float32"}, {"name": "1425", "dtype": "float32"}, {"name": "1426", "dtype": 
"float32"}, {"name": "1427", "dtype": "float32"}, {"name": "1428", "dtype": "float32"}, {"name": "1429", "dtype": "float32"}, {"name": "1430", "dtype": "float32"}, {"name": "1431", "dtype": "float32"}, {"name": "1432", "dtype": "float32"}, {"name": "1433", "dtype": "float32"}, {"name": "1434", "dtype": "float32"}, {"name": "1435", "dtype": "float32"}, {"name": "1436", "dtype": "float32"}, {"name": "1437", "dtype": "float32"}, {"name": "1438", "dtype": "float32"}, {"name": "1439", "dtype": "float32"}, {"name": "1440", "dtype": "float32"}, {"name": "1441", "dtype": "float32"}, {"name": "1442", "dtype": "float32"}, {"name": "1443", "dtype": "float32"}, {"name": "1444", "dtype": "float32"}, {"name": "1445", "dtype": "float32"}, {"name": "1446", "dtype": "float32"}, {"name": "1447", "dtype": "float32"}, {"name": "1448", "dtype": "float32"}, {"name": "1449", "dtype": "float32"}, {"name": "1450", "dtype": "float32"}, {"name": "1451", "dtype": "float32"}, {"name": "1452", "dtype": "float32"}, {"name": "1453", "dtype": "float32"}, {"name": "1454", "dtype": "float32"}, {"name": "1455", "dtype": "float32"}, {"name": "1456", "dtype": "float32"}, {"name": "1457", "dtype": "float32"}, {"name": "1458", "dtype": "float32"}, {"name": "1459", "dtype": "float32"}, {"name": "1460", "dtype": "float32"}, {"name": "1461", "dtype": "float32"}, {"name": "1462", "dtype": "float32"}, {"name": "1463", "dtype": "float32"}, {"name": "1464", "dtype": "float32"}, {"name": "1465", "dtype": "float32"}, {"name": "1466", "dtype": "float32"}, {"name": "1467", "dtype": "float32"}, {"name": "1468", "dtype": "float32"}, {"name": "1469", "dtype": "float32"}, {"name": "1470", "dtype": "float32"}, {"name": "1471", "dtype": "float32"}, {"name": "1472", "dtype": "float32"}, {"name": "1473", "dtype": "float32"}, {"name": "1474", "dtype": "float32"}, {"name": "1475", "dtype": "float32"}, {"name": "1476", "dtype": "float32"}, {"name": "1477", "dtype": "float32"}, {"name": "1478", "dtype": "float32"}, {"name": "1479", "dtype": "float32"}, {"name": "1480", "dtype": "float32"}, {"name": "1481", "dtype": "float32"}, {"name": "1482", "dtype": "float32"}, {"name": "1483", "dtype": "float32"}, {"name": "1484", "dtype": "float32"}, {"name": "1485", "dtype": "float32"}, {"name": "1486", "dtype": "float32"}, {"name": "1487", "dtype": "float32"}, {"name": "1488", "dtype": "float32"}, {"name": "1489", "dtype": "float32"}, {"name": "1490", "dtype": "float32"}, {"name": "1491", "dtype": "float32"}, {"name": "1492", "dtype": "float32"}, {"name": "1493", "dtype": "float32"}, {"name": "1494", "dtype": "float32"}, {"name": "1495", "dtype": "float32"}, {"name": "1496", "dtype": "float32"}, {"name": "1497", "dtype": "float32"}, {"name": "1498", "dtype": "float32"}, {"name": "1499", "dtype": "float32"}, {"name": "1500", "dtype": "float32"}, {"name": "1501", "dtype": "float32"}, {"name": "1502", "dtype": "float32"}, {"name": "1503", "dtype": "float32"}, {"name": "1504", "dtype": "float32"}, {"name": "1505", "dtype": "float32"}, {"name": "1506", "dtype": "float32"}, {"name": "1507", "dtype": "float32"}, {"name": "1508", "dtype": "float32"}, {"name": "1509", "dtype": "float32"}, {"name": "1510", "dtype": "float32"}, {"name": "1511", "dtype": "float32"}, {"name": "1512", "dtype": "float32"}, {"name": "1513", "dtype": "float32"}, {"name": "1514", "dtype": "float32"}, {"name": "1515", "dtype": "float32"}, {"name": "1516", "dtype": "float32"}, {"name": "1517", "dtype": "float32"}, {"name": "1518", "dtype": "float32"}, {"name": "1519", "dtype": "float32"}, {"name": 
"1520", "dtype": "float32"}, {"name": "1521", "dtype": "float32"}, {"name": "1522", "dtype": "float32"}, {"name": "1523", "dtype": "float32"}, {"name": "1524", "dtype": "float32"}, {"name": "1525", "dtype": "float32"}, {"name": "1526", "dtype": "float32"}, {"name": "1527", "dtype": "float32"}, {"name": "1528", "dtype": "float32"}, {"name": "1529", "dtype": "float32"}, {"name": "1530", "dtype": "float32"}, {"name": "1531", "dtype": "float32"}, {"name": "1532", "dtype": "float32"}, {"name": "1533", "dtype": "float32"}, {"name": "1534", "dtype": "float32"}, {"name": "1535", "dtype": "float32"}, {"name": "1536", "dtype": "float32"}, {"name": "1537", "dtype": "float32"}, {"name": "1538", "dtype": "float32"}, {"name": "1539", "dtype": "float32"}, {"name": "1540", "dtype": "float32"}, {"name": "1541", "dtype": "float32"}, {"name": "1542", "dtype": "float32"}, {"name": "1543", "dtype": "float32"}, {"name": "1544", "dtype": "float32"}, {"name": "1545", "dtype": "float32"}, {"name": "1546", "dtype": "float32"}, {"name": "1547", "dtype": "float32"}, {"name": "1548", "dtype": "float32"}, {"name": "1549", "dtype": "float32"}, {"name": "1550", "dtype": "float32"}, {"name": "1551", "dtype": "float32"}, {"name": "1552", "dtype": "float32"}, {"name": "1553", "dtype": "float32"}, {"name": "1554", "dtype": "float32"}, {"name": "1555", "dtype": "float32"}, {"name": "1556", "dtype": "float32"}, {"name": "1557", "dtype": "float32"}, {"name": "1558", "dtype": "float32"}, {"name": "1559", "dtype": "float32"}, {"name": "1560", "dtype": "float32"}, {"name": "1561", "dtype": "float32"}, {"name": "1562", "dtype": "float32"}, {"name": "1563", "dtype": "float32"}, {"name": "1564", "dtype": "float32"}, {"name": "1565", "dtype": "float32"}, {"name": "1566", "dtype": "float32"}, {"name": "1567", "dtype": "float32"}, {"name": "1568", "dtype": "float32"}, {"name": "1569", "dtype": "float32"}, {"name": "1570", "dtype": "float32"}, {"name": "1571", "dtype": "float32"}, {"name": "1572", "dtype": "float32"}, {"name": "1573", "dtype": "float32"}, {"name": "1574", "dtype": "float32"}, {"name": "1575", "dtype": "float32"}, {"name": "1576", "dtype": "float32"}, {"name": "1577", "dtype": "float32"}, {"name": "1578", "dtype": "float32"}, {"name": "1579", "dtype": "float32"}, {"name": "1580", "dtype": "float32"}, {"name": "1581", "dtype": "float32"}, {"name": "1582", "dtype": "float32"}, {"name": "1583", "dtype": "float32"}, {"name": "1584", "dtype": "float32"}, {"name": "1585", "dtype": "float32"}, {"name": "1586", "dtype": "float32"}, {"name": "1587", "dtype": "float32"}, {"name": "1588", "dtype": "float32"}, {"name": "1589", "dtype": "float32"}, {"name": "1590", "dtype": "float32"}, {"name": "1591", "dtype": "float32"}, {"name": "1592", "dtype": "float32"}, {"name": "1593", "dtype": "float32"}, {"name": "1594", "dtype": "float32"}, {"name": "1595", "dtype": "float32"}, {"name": "1596", "dtype": "float32"}, {"name": "1597", "dtype": "float32"}, {"name": "1598", "dtype": "float32"}, {"name": "1599", "dtype": "float32"}, {"name": "1600", "dtype": "float32"}, {"name": "1601", "dtype": "float32"}, {"name": "1602", "dtype": "float32"}, {"name": "1603", "dtype": "float32"}, {"name": "1604", "dtype": "float32"}, {"name": "1605", "dtype": "float32"}, {"name": "1606", "dtype": "float32"}, {"name": "1607", "dtype": "float32"}, {"name": "1608", "dtype": "float32"}, {"name": "1609", "dtype": "float32"}, {"name": "1610", "dtype": "float32"}, {"name": "1611", "dtype": "float32"}, {"name": "1612", "dtype": "float32"}, {"name": "1613", "dtype": 
"float32"}, {"name": "1614", "dtype": "float32"}, {"name": "1615", "dtype": "float32"}, {"name": "1616", "dtype": "float32"}, {"name": "1617", "dtype": "float32"}, {"name": "1618", "dtype": "float32"}, {"name": "1619", "dtype": "float32"}, {"name": "1620", "dtype": "float32"}, {"name": "1621", "dtype": "float32"}, {"name": "1622", "dtype": "float32"}, {"name": "1623", "dtype": "float32"}, {"name": "1624", "dtype": "float32"}, {"name": "1625", "dtype": "float32"}, {"name": "1626", "dtype": "float32"}, {"name": "1627", "dtype": "float32"}, {"name": "1628", "dtype": "float32"}, {"name": "1629", "dtype": "float32"}, {"name": "1630", "dtype": "float32"}, {"name": "1631", "dtype": "float32"}, {"name": "1632", "dtype": "float32"}, {"name": "1633", "dtype": "float32"}, {"name": "1634", "dtype": "float32"}, {"name": "1635", "dtype": "float32"}, {"name": "1636", "dtype": "float32"}, {"name": "1637", "dtype": "float32"}, {"name": "1638", "dtype": "float32"}, {"name": "1639", "dtype": "float32"}, {"name": "1640", "dtype": "float32"}, {"name": "1641", "dtype": "float32"}, {"name": "1642", "dtype": "float32"}, {"name": "1643", "dtype": "float32"}, {"name": "1644", "dtype": "float32"}, {"name": "1645", "dtype": "float32"}, {"name": "1646", "dtype": "float32"}, {"name": "1647", "dtype": "float32"}, {"name": "1648", "dtype": "float32"}, {"name": "1649", "dtype": "float32"}, {"name": "1650", "dtype": "float32"}, {"name": "1651", "dtype": "float32"}, {"name": "1652", "dtype": "float32"}, {"name": "1653", "dtype": "float32"}, {"name": "1654", "dtype": "float32"}, {"name": "1655", "dtype": "float32"}, {"name": "1656", "dtype": "float32"}, {"name": "1657", "dtype": "float32"}, {"name": "1658", "dtype": "float32"}, {"name": "1659", "dtype": "float32"}, {"name": "1660", "dtype": "float32"}, {"name": "1661", "dtype": "float32"}, {"name": "1662", "dtype": "float32"}, {"name": "1663", "dtype": "float32"}, {"name": "1664", "dtype": "float32"}, {"name": "1665", "dtype": "float32"}, {"name": "1666", "dtype": "float32"}, {"name": "1667", "dtype": "float32"}, {"name": "1668", "dtype": "float32"}, {"name": "1669", "dtype": "float32"}, {"name": "1670", "dtype": "float32"}, {"name": "1671", "dtype": "float32"}, {"name": "1672", "dtype": "float32"}, {"name": "1673", "dtype": "float32"}, {"name": "1674", "dtype": "float32"}, {"name": "1675", "dtype": "float32"}, {"name": "1676", "dtype": "float32"}, {"name": "1677", "dtype": "float32"}, {"name": "1678", "dtype": "float32"}, {"name": "1679", "dtype": "float32"}, {"name": "1680", "dtype": "float32"}, {"name": "1681", "dtype": "float32"}, {"name": "1682", "dtype": "float32"}, {"name": "1683", "dtype": "float32"}, {"name": "1684", "dtype": "float32"}, {"name": "1685", "dtype": "float32"}, {"name": "1686", "dtype": "float32"}, {"name": "1687", "dtype": "float32"}, {"name": "1688", "dtype": "float32"}, {"name": "1689", "dtype": "float32"}, {"name": "1690", "dtype": "float32"}, {"name": "1691", "dtype": "float32"}, {"name": "1692", "dtype": "float32"}, {"name": "1693", "dtype": "float32"}, {"name": "1694", "dtype": "float32"}, {"name": "1695", "dtype": "float32"}, {"name": "1696", "dtype": "float32"}, {"name": "1697", "dtype": "float32"}, {"name": "1698", "dtype": "float32"}, {"name": "1699", "dtype": "float32"}, {"name": "1700", "dtype": "float32"}, {"name": "1701", "dtype": "float32"}, {"name": "1702", "dtype": "float32"}, {"name": "1703", "dtype": "float32"}, {"name": "1704", "dtype": "float32"}, {"name": "1705", "dtype": "float32"}, {"name": "1706", "dtype": "float32"}, {"name": 
"1707", "dtype": "float32"}, {"name": "1708", "dtype": "float32"}, {"name": "1709", "dtype": "float32"}, {"name": "1710", "dtype": "float32"}, {"name": "1711", "dtype": "float32"}, {"name": "1712", "dtype": "float32"}, {"name": "1713", "dtype": "float32"}, {"name": "1714", "dtype": "float32"}, {"name": "1715", "dtype": "float32"}, {"name": "1716", "dtype": "float32"}, {"name": "1717", "dtype": "float32"}, {"name": "1718", "dtype": "float32"}, {"name": "1719", "dtype": "float32"}, {"name": "1720", "dtype": "float32"}, {"name": "1721", "dtype": "float32"}, {"name": "1722", "dtype": "float32"}, {"name": "1723", "dtype": "float32"}, {"name": "1724", "dtype": "float32"}, {"name": "1725", "dtype": "float32"}, {"name": "1726", "dtype": "float32"}, {"name": "1727", "dtype": "float32"}, {"name": "1728", "dtype": "float32"}, {"name": "1729", "dtype": "float32"}, {"name": "1730", "dtype": "float32"}, {"name": "1731", "dtype": "float32"}, {"name": "1732", "dtype": "float32"}, {"name": "1733", "dtype": "float32"}, {"name": "1734", "dtype": "float32"}, {"name": "1735", "dtype": "float32"}, {"name": "1736", "dtype": "float32"}, {"name": "1737", "dtype": "float32"}, {"name": "1738", "dtype": "float32"}, {"name": "1739", "dtype": "float32"}, {"name": "1740", "dtype": "float32"}, {"name": "1741", "dtype": "float32"}, {"name": "1742", "dtype": "float32"}, {"name": "1743", "dtype": "float32"}, {"name": "1744", "dtype": "float32"}, {"name": "1745", "dtype": "float32"}, {"name": "1746", "dtype": "float32"}, {"name": "1747", "dtype": "float32"}, {"name": "1748", "dtype": "float32"}, {"name": "1749", "dtype": "float32"}, {"name": "1750", "dtype": "float32"}, {"name": "1751", "dtype": "float32"}, {"name": "1752", "dtype": "float32"}, {"name": "1753", "dtype": "float32"}, {"name": "1754", "dtype": "float32"}, {"name": "1755", "dtype": "float32"}, {"name": "1756", "dtype": "float32"}, {"name": "1757", "dtype": "float32"}, {"name": "1758", "dtype": "float32"}, {"name": "1759", "dtype": "float32"}, {"name": "1760", "dtype": "float32"}, {"name": "1761", "dtype": "float32"}, {"name": "1762", "dtype": "float32"}, {"name": "1763", "dtype": "float32"}, {"name": "1764", "dtype": "float32"}, {"name": "1765", "dtype": "float32"}, {"name": "1766", "dtype": "float32"}, {"name": "1767", "dtype": "float32"}, {"name": "1768", "dtype": "float32"}, {"name": "1769", "dtype": "float32"}, {"name": "1770", "dtype": "float32"}, {"name": "1771", "dtype": "float32"}, {"name": "1772", "dtype": "float32"}, {"name": "1773", "dtype": "float32"}, {"name": "1774", "dtype": "float32"}, {"name": "1775", "dtype": "float32"}, {"name": "1776", "dtype": "float32"}, {"name": "1777", "dtype": "float32"}, {"name": "1778", "dtype": "float32"}, {"name": "1779", "dtype": "float32"}, {"name": "1780", "dtype": "float32"}, {"name": "1781", "dtype": "float32"}, {"name": "1782", "dtype": "float32"}, {"name": "1783", "dtype": "float32"}, {"name": "1784", "dtype": "float32"}, {"name": "1785", "dtype": "float32"}, {"name": "1786", "dtype": "float32"}, {"name": "1787", "dtype": "float32"}, {"name": "1788", "dtype": "float32"}, {"name": "1789", "dtype": "float32"}, {"name": "1790", "dtype": "float32"}, {"name": "1791", "dtype": "float32"}, {"name": "1792", "dtype": "float32"}, {"name": "1793", "dtype": "float32"}, {"name": "1794", "dtype": "float32"}, {"name": "1795", "dtype": "float32"}, {"name": "1796", "dtype": "float32"}, {"name": "1797", "dtype": "float32"}, {"name": "1798", "dtype": "float32"}, {"name": "1799", "dtype": "float32"}, {"name": "1800", "dtype": 
"float32"}, {"name": "1801", "dtype": "float32"}, {"name": "1802", "dtype": "float32"}, {"name": "1803", "dtype": "float32"}, {"name": "1804", "dtype": "float32"}, {"name": "1805", "dtype": "float32"}, {"name": "1806", "dtype": "float32"}, {"name": "1807", "dtype": "float32"}, {"name": "1808", "dtype": "float32"}, {"name": "1809", "dtype": "float32"}, {"name": "1810", "dtype": "float32"}, {"name": "1811", "dtype": "float32"}, {"name": "1812", "dtype": "float32"}, {"name": "1813", "dtype": "float32"}, {"name": "1814", "dtype": "float32"}, {"name": "1815", "dtype": "float32"}, {"name": "1816", "dtype": "float32"}, {"name": "1817", "dtype": "float32"}, {"name": "1818", "dtype": "float32"}, {"name": "1819", "dtype": "float32"}, {"name": "1820", "dtype": "float32"}, {"name": "1821", "dtype": "float32"}, {"name": "1822", "dtype": "float32"}, {"name": "1823", "dtype": "float32"}, {"name": "1824", "dtype": "float32"}, {"name": "1825", "dtype": "float32"}, {"name": "1826", "dtype": "float32"}, {"name": "1827", "dtype": "float32"}, {"name": "1828", "dtype": "float32"}, {"name": "1829", "dtype": "float32"}, {"name": "1830", "dtype": "float32"}, {"name": "1831", "dtype": "float32"}, {"name": "1832", "dtype": "float32"}, {"name": "1833", "dtype": "float32"}, {"name": "1834", "dtype": "float32"}, {"name": "1835", "dtype": "float32"}, {"name": "1836", "dtype": "float32"}, {"name": "1837", "dtype": "float32"}, {"name": "1838", "dtype": "float32"}, {"name": "1839", "dtype": "float32"}, {"name": "1840", "dtype": "float32"}, {"name": "1841", "dtype": "float32"}, {"name": "1842", "dtype": "float32"}, {"name": "1843", "dtype": "float32"}, {"name": "1844", "dtype": "float32"}, {"name": "1845", "dtype": "float32"}, {"name": "1846", "dtype": "float32"}, {"name": "1847", "dtype": "float32"}, {"name": "1848", "dtype": "float32"}, {"name": "1849", "dtype": "float32"}, {"name": "1850", "dtype": "float32"}, {"name": "1851", "dtype": "float32"}, {"name": "1852", "dtype": "float32"}, {"name": "1853", "dtype": "float32"}, {"name": "1854", "dtype": "float32"}, {"name": "1855", "dtype": "float32"}, {"name": "1856", "dtype": "float32"}, {"name": "1857", "dtype": "float32"}, {"name": "1858", "dtype": "float32"}, {"name": "1859", "dtype": "float32"}, {"name": "1860", "dtype": "float32"}, {"name": "1861", "dtype": "float32"}, {"name": "1862", "dtype": "float32"}, {"name": "1863", "dtype": "float32"}, {"name": "1864", "dtype": "float32"}, {"name": "1865", "dtype": "float32"}, {"name": "1866", "dtype": "float32"}, {"name": "1867", "dtype": "float32"}, {"name": "1868", "dtype": "float32"}, {"name": "1869", "dtype": "float32"}, {"name": "1870", "dtype": "float32"}, {"name": "1871", "dtype": "float32"}, {"name": "1872", "dtype": "float32"}, {"name": "1873", "dtype": "float32"}, {"name": "1874", "dtype": "float32"}, {"name": "1875", "dtype": "float32"}, {"name": "1876", "dtype": "float32"}, {"name": "1877", "dtype": "float32"}, {"name": "1878", "dtype": "float32"}, {"name": "1879", "dtype": "float32"}, {"name": "1880", "dtype": "float32"}, {"name": "1881", "dtype": "float32"}, {"name": "1882", "dtype": "float32"}, {"name": "1883", "dtype": "float32"}, {"name": "1884", "dtype": "float32"}, {"name": "1885", "dtype": "float32"}, {"name": "1886", "dtype": "float32"}, {"name": "1887", "dtype": "float32"}, {"name": "1888", "dtype": "float32"}, {"name": "1889", "dtype": "float32"}, {"name": "1890", "dtype": "float32"}, {"name": "1891", "dtype": "float32"}, {"name": "1892", "dtype": "float32"}, {"name": "1893", "dtype": "float32"}, {"name": 
"1894", "dtype": "float32"}, {"name": "1895", "dtype": "float32"}, {"name": "1896", "dtype": "float32"}, {"name": "1897", "dtype": "float32"}, {"name": "1898", "dtype": "float32"}, {"name": "1899", "dtype": "float32"}, {"name": "1900", "dtype": "float32"}, {"name": "1901", "dtype": "float32"}, {"name": "1902", "dtype": "float32"}, {"name": "1903", "dtype": "float32"}, {"name": "1904", "dtype": "float32"}, {"name": "1905", "dtype": "float32"}, {"name": "1906", "dtype": "float32"}, {"name": "1907", "dtype": "float32"}, {"name": "1908", "dtype": "float32"}, {"name": "1909", "dtype": "float32"}, {"name": "1910", "dtype": "float32"}, {"name": "1911", "dtype": "float32"}, {"name": "1912", "dtype": "float32"}, {"name": "1913", "dtype": "float32"}, {"name": "1914", "dtype": "float32"}, {"name": "1915", "dtype": "float32"}, {"name": "1916", "dtype": "float32"}, {"name": "1917", "dtype": "float32"}, {"name": "1918", "dtype": "float32"}, {"name": "1919", "dtype": "float32"}, {"name": "1920", "dtype": "float32"}, {"name": "1921", "dtype": "float32"}, {"name": "1922", "dtype": "float32"}, {"name": "1923", "dtype": "float32"}, {"name": "1924", "dtype": "float32"}, {"name": "1925", "dtype": "float32"}, {"name": "1926", "dtype": "float32"}, {"name": "1927", "dtype": "float32"}, {"name": "1928", "dtype": "float32"}, {"name": "1929", "dtype": "float32"}, {"name": "1930", "dtype": "float32"}, {"name": "1931", "dtype": "float32"}, {"name": "1932", "dtype": "float32"}, {"name": "1933", "dtype": "float32"}, {"name": "1934", "dtype": "float32"}, {"name": "1935", "dtype": "float32"}, {"name": "1936", "dtype": "float32"}, {"name": "1937", "dtype": "float32"}, {"name": "1938", "dtype": "float32"}, {"name": "1939", "dtype": "float32"}, {"name": "1940", "dtype": "float32"}, {"name": "1941", "dtype": "float32"}, {"name": "1942", "dtype": "float32"}, {"name": "1943", "dtype": "float32"}, {"name": "1944", "dtype": "float32"}, {"name": "1945", "dtype": "float32"}, {"name": "1946", "dtype": "float32"}, {"name": "1947", "dtype": "float32"}, {"name": "1948", "dtype": "float32"}, {"name": "1949", "dtype": "float32"}, {"name": "1950", "dtype": "float32"}, {"name": "1951", "dtype": "float32"}, {"name": "1952", "dtype": "float32"}, {"name": "1953", "dtype": "float32"}, {"name": "1954", "dtype": "float32"}, {"name": "1955", "dtype": "float32"}, {"name": "1956", "dtype": "float32"}, {"name": "1957", "dtype": "float32"}, {"name": "1958", "dtype": "float32"}, {"name": "1959", "dtype": "float32"}, {"name": "1960", "dtype": "float32"}, {"name": "1961", "dtype": "float32"}, {"name": "1962", "dtype": "float32"}, {"name": "1963", "dtype": "float32"}, {"name": "1964", "dtype": "float32"}, {"name": "1965", "dtype": "float32"}, {"name": "1966", "dtype": "float32"}, {"name": "1967", "dtype": "float32"}, {"name": "1968", "dtype": "float32"}, {"name": "1969", "dtype": "float32"}, {"name": "1970", "dtype": "float32"}, {"name": "1971", "dtype": "float32"}, {"name": "1972", "dtype": "float32"}, {"name": "1973", "dtype": "float32"}, {"name": "1974", "dtype": "float32"}, {"name": "1975", "dtype": "float32"}, {"name": "1976", "dtype": "float32"}, {"name": "1977", "dtype": "float32"}, {"name": "1978", "dtype": "float32"}, {"name": "1979", "dtype": "float32"}, {"name": "1980", "dtype": "float32"}, {"name": "1981", "dtype": "float32"}, {"name": "1982", "dtype": "float32"}, {"name": "1983", "dtype": "float32"}, {"name": "1984", "dtype": "float32"}, {"name": "1985", "dtype": "float32"}, {"name": "1986", "dtype": "float32"}, {"name": "1987", "dtype": 
"float32"}, {"name": "1988", "dtype": "float32"}, {"name": "1989", "dtype": "float32"}, {"name": "1990", "dtype": "float32"}, {"name": "1991", "dtype": "float32"}, {"name": "1992", "dtype": "float32"}, {"name": "1993", "dtype": "float32"}, {"name": "1994", "dtype": "float32"}, {"name": "1995", "dtype": "float32"}, {"name": "1996", "dtype": "float32"}, {"name": "1997", "dtype": "float32"}, {"name": "1998", "dtype": "float32"}, {"name": "1999", "dtype": "float32"}, {"name": "2000", "dtype": "float32"}, {"name": "2001", "dtype": "float32"}, {"name": "2002", "dtype": "float32"}, {"name": "2003", "dtype": "float32"}, {"name": "2004", "dtype": "float32"}, {"name": "2005", "dtype": "float32"}, {"name": "2006", "dtype": "float32"}, {"name": "2007", "dtype": "float32"}, {"name": "2008", "dtype": "float32"}, {"name": "2009", "dtype": "float32"}, {"name": "2010", "dtype": "float32"}, {"name": "2011", "dtype": "float32"}, {"name": "2012", "dtype": "float32"}, {"name": "2013", "dtype": "float32"}, {"name": "2014", "dtype": "float32"}, {"name": "2015", "dtype": "float32"}, {"name": "2016", "dtype": "float32"}, {"name": "2017", "dtype": "float32"}, {"name": "2018", "dtype": "float32"}, {"name": "2019", "dtype": "float32"}, {"name": "2020", "dtype": "float32"}, {"name": "2021", "dtype": "float32"}, {"name": "2022", "dtype": "float32"}, {"name": "2023", "dtype": "float32"}, {"name": "2024", "dtype": "float32"}, {"name": "2025", "dtype": "float32"}, {"name": "2026", "dtype": "float32"}, {"name": "2027", "dtype": "float32"}, {"name": "2028", "dtype": "float32"}, {"name": "2029", "dtype": "float32"}, {"name": "2030", "dtype": "float32"}, {"name": "2031", "dtype": "float32"}, {"name": "2032", "dtype": "float32"}, {"name": "2033", "dtype": "float32"}, {"name": "2034", "dtype": "float32"}, {"name": "2035", "dtype": "float32"}, {"name": "2036", "dtype": "float32"}, {"name": "2037", "dtype": "float32"}, {"name": "2038", "dtype": "float32"}, {"name": "2039", "dtype": "float32"}, {"name": "2040", "dtype": "float32"}, {"name": "2041", "dtype": "float32"}, {"name": "2042", "dtype": "float32"}, {"name": "2043", "dtype": "float32"}, {"name": "2044", "dtype": "float32"}, {"name": "2045", "dtype": "float32"}, {"name": "2046", "dtype": "float32"}, {"name": "2047", "dtype": "float32"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 307650065.625, "num_examples": 37500}, {"name": "test", "num_bytes": 102550020.0, "num_examples": 12500}], "download_size": 565185055, "dataset_size": 410200085.625}}
2023-08-18T17:05:42+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Spirit_GPTNEO_Baseline" More Information needed
[ "# Dataset Card for \"Spirit_GPTNEO_Baseline\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Spirit_GPTNEO_Baseline\"\n\nMore Information needed" ]
[ 6, 20 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"Spirit_GPTNEO_Baseline\"\n\nMore Information needed" ]
9605bbc9291d27288c75406a6e7eefa6a91432fe
# AutoTrain Dataset for project: nlp ## Dataset Description This dataset has been automatically processed by AutoTrain for project nlp. ### Languages The BCP-47 code for the dataset's language is en. ## Dataset Structure ### Data Instances A sample from this dataset looks as follows: ```json [ { "context": "Здравствуйте 🤝 На специальном геле, в котором подобраны ингредиенты отвечающие за подтяжку, антицеллюлитный эффект и жирозжигание ! Выполняться определённые техники массажные, по определённым линиям, пилинг сначала, потом вбивание Геля в поры и обертывание в инфракрасное одеяло! Не больно)", "question": "Здравствуйте, расскажите про процедуру карамельная липаксация , как это делается , больно это ?", "answers.text": [ "Здравствуйте 🤝 На специальном геле, в котором подобраны ингредиенты отвечающие за подтяжку, антицеллюлитный эффект и жирозжигание ! Выполняться определённые техники массажные, по определённым линиям, пилинг сначала, потом вбивание Геля в поры и обертывание в инфракрасное одеяло! Не больно)" ], "answers.answer_start": [ -1 ] }, { "context": "Здравствуйте 🤝 Да, на что хотите?) с какого числа? В какое время удобно?", "question": "А можно на январь уже записаться ?", "answers.text": [ "Здравствуйте 🤝 Да, на что хотите?) с какого числа? В какое время удобно?" ], "answers.answer_start": [ -1 ] } ] ``` ### Dataset Fields The dataset has the following fields (also called "features"): ```json { "context": "Value(dtype='string', id=None)", "question": "Value(dtype='string', id=None)", "answers.text": "Sequence(feature=Value(dtype='string', id=None), length=-1, id=None)", "answers.answer_start": "Sequence(feature=Value(dtype='int32', id=None), length=-1, id=None)" } ``` ### Dataset Splits This dataset is split into train and validation splits. The split sizes are as follows: | Split name | Num samples | | ------------ | ------------------- | | train | 248 | | valid | 63 |
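The four field names above ("context", "question", "answers.text", "answers.answer_start") are literal, flattened column names, dots included, so they are indexed as whole strings. A minimal sketch of loading and inspecting a sample, using the repo id shown below this card; note that "answers.answer_start" is -1 in the samples above, so answers are not character-aligned to the context:

```python
# A minimal sketch: load the AutoTrain QA-style dataset and read one sample.
# Field names contain literal dots, so they are indexed as whole strings.
from datasets import load_dataset

ds = load_dataset("SergeyKarpenko1/autotrain-data-nlp")

sample = ds["train"][0]
print(sample["question"])
print(sample["answers.text"][0])       # the full reply text
print(sample["answers.answer_start"])  # [-1]: no span alignment to the context
```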
SergeyKarpenko1/autotrain-data-nlp
[ "language:en", "region:us" ]
2023-08-18T16:53:43+00:00
{"language": ["en"]}
2023-08-18T16:57:12+00:00
[]
[ "en" ]
TAGS #language-English #region-us
AutoTrain Dataset for project: nlp ================================== Dataset Description ------------------- This dataset has been automatically processed by AutoTrain for project nlp. ### Languages The BCP-47 code for the dataset's language is en. Dataset Structure ----------------- ### Data Instances A sample from this dataset looks as follows: ### Dataset Fields The dataset has the following fields (also called "features"): ### Dataset Splits This dataset is split into train and validation splits. The split sizes are as follows:
[ "### Languages\n\n\nThe BCP-47 code for the dataset's language is en.\n\n\nDataset Structure\n-----------------", "### Data Instances\n\n\nA sample from this dataset looks as follows:", "### Dataset Fields\n\n\nThe dataset has the following fields (also called \"features\"):", "### Dataset Splits\n\n\nThis dataset is split into train and validation splits. The split sizes are as follows:" ]
[ "TAGS\n#language-English #region-us \n", "### Languages\n\n\nThe BCP-47 code for the dataset's language is en.\n\n\nDataset Structure\n-----------------", "### Data Instances\n\n\nA sample from this dataset looks as follows:", "### Dataset Fields\n\n\nThe dataset has the following fields (also called \"features\"):", "### Dataset Splits\n\n\nThis dataset is split into train and validation splits. The split sizes are as follows:" ]
[ 10, 26, 17, 23, 27 ]
[ "passage: TAGS\n#language-English #region-us \n### Languages\n\n\nThe BCP-47 code for the dataset's language is en.\n\n\nDataset Structure\n-----------------### Data Instances\n\n\nA sample from this dataset looks as follows:### Dataset Fields\n\n\nThe dataset has the following fields (also called \"features\"):### Dataset Splits\n\n\nThis dataset is split into a train and validation split. The split sizes are as follow:" ]
990df8bbebab887b78e3576e90f12e3a6803f969
# Dataset Card for "newspaper-type" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
davanstrien/newspaper-type
[ "region:us" ]
2023-08-18T16:54:23+00:00
{"configs": [{"config_name": "cleaned", "data_files": [{"split": "train", "path": "cleaned/train-*"}]}, {"config_name": "davanstrien--newspaper-type", "data_files": [{"split": "train", "path": "davanstrien--newspaper-type/train-*"}]}, {"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": [{"config_name": "cleaned", "features": [{"name": "filename", "dtype": "string"}, {"name": "art", "dtype": "float64"}, {"name": "text", "dtype": "string"}, {"name": "issue_name", "dtype": "string"}, {"name": "type", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 878196.063583815, "num_examples": 143}], "download_size": 0, "dataset_size": 878196.063583815}, {"config_name": "davanstrien--newspaper-type", "features": [{"name": "filename", "dtype": "string"}, {"name": "art", "dtype": "float64"}, {"name": "text", "dtype": "string"}, {"name": "issue_name", "dtype": "string"}, {"name": "type", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1062433.0, "num_examples": 173}], "download_size": 662620, "dataset_size": 1062433.0}, {"config_name": "default", "features": [{"name": "filename", "dtype": "string"}, {"name": "art", "dtype": "float64"}, {"name": "text", "dtype": "string"}, {"name": "issue_name", "dtype": "string"}, {"name": "type", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1062433.0, "num_examples": 173}], "download_size": 0, "dataset_size": 1062433.0}]}
2023-08-18T17:04:41+00:00
[]
[]
TAGS #region-us
# Dataset Card for "newspaper-type" More Information needed
[ "# Dataset Card for \"newspaper-type\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"newspaper-type\"\n\nMore Information needed" ]
[ 6, 14 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"newspaper-type\"\n\nMore Information needed" ]
5c889e19f564d3892c6052fd57ab794576aa97bd
# Dataset of watatsuki_no_toyohime/綿月豊姫 (Touhou) This is the dataset of watatsuki_no_toyohime/綿月豊姫 (Touhou), containing 265 images and their tags. The core tags of this character are `blonde_hair, long_hair, hat, yellow_eyes, ribbon, bow, hat_ribbon, breasts, white_headwear`, which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). ## List of Packages | Name | Images | Size | Download | Type | Description | |:-----------------|---------:|:-----------|:------------------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------| | raw | 265 | 199.74 MiB | [Download](https://huggingface.co/datasets/CyberHarem/watatsuki_no_toyohime_touhou/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). | | 800 | 265 | 146.17 MiB | [Download](https://huggingface.co/datasets/CyberHarem/watatsuki_no_toyohime_touhou/resolve/main/dataset-800.zip) | IMG+TXT | Dataset with the shorter side not exceeding 800 pixels. | | stage3-p480-800 | 478 | 260.95 MiB | [Download](https://huggingface.co/datasets/CyberHarem/watatsuki_no_toyohime_touhou/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | | 1200 | 265 | 187.76 MiB | [Download](https://huggingface.co/datasets/CyberHarem/watatsuki_no_toyohime_touhou/resolve/main/dataset-1200.zip) | IMG+TXT | Dataset with the shorter side not exceeding 1200 pixels. | | stage3-p480-1200 | 478 | 323.56 MiB | [Download](https://huggingface.co/datasets/CyberHarem/watatsuki_no_toyohime_touhou/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | ### Load Raw Dataset with Waifuc We provide the raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need it, just run the following code ```python import os import zipfile from huggingface_hub import hf_hub_download from waifuc.source import LocalSource # download raw archive file zip_file = hf_hub_download( repo_id='CyberHarem/watatsuki_no_toyohime_touhou', repo_type='dataset', filename='dataset-raw.zip', ) # extract files to your directory dataset_dir = 'dataset_dir' os.makedirs(dataset_dir, exist_ok=True) with zipfile.ZipFile(zip_file, 'r') as zf: zf.extractall(dataset_dir) # load the dataset with waifuc source = LocalSource(dataset_dir) for item in source: print(item.image, item.meta['filename'], item.meta['tags']) ``` ## List of Clusters List of tag clustering results; some outfits may be mined here.
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 5 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, bangs, belt, long_sleeves, looking_at_viewer, purple_dress, simple_background, solo, white_background, white_shirt, buttons, collared_shirt, hat_bow, single_strap, large_breasts, long_dress, one-hour_drawing_challenge, blush, brown_eyes, full_body, hair_between_eyes, open_mouth, pinafore_dress, purple_bow, smile, wavy_hair | | 1 | 17 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, bangs, smile, solo, white_shirt, hat_bow, long_sleeves, purple_dress, holding_fan, looking_at_viewer, collared_shirt, folding_fan, belt, blush, purple_bow, closed_mouth, buttons, simple_background, blue_dress, brown_eyes, cowboy_shot, single_strap, purple_ribbon, hair_between_eyes, pinafore_dress, standing, upper_body | | 2 | 5 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, belt, looking_at_viewer, shirt, smile, solo, blush, hat_bow, juliet_sleeves, purple_dress, simple_background, sitting, very_long_hair | | 3 | 25 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, solo, belt, dress, smile, folding_fan | | 4 | 6 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1girl, peach, solo, belt, open_mouth, smile, dress | | 5 | 5 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | 2girls, peach, smile, blush, dress, belt, open_mouth, sitting | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | bangs | belt | long_sleeves | looking_at_viewer | purple_dress | simple_background | solo | white_background | white_shirt | buttons | collared_shirt | hat_bow | single_strap | large_breasts | long_dress | one-hour_drawing_challenge | blush | brown_eyes | full_body | hair_between_eyes | open_mouth | pinafore_dress | purple_bow | smile | wavy_hair | holding_fan | folding_fan | closed_mouth | blue_dress | cowboy_shot | purple_ribbon | standing | upper_body | shirt | juliet_sleeves | sitting | very_long_hair | dress | peach | 2girls | 
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:--------|:-------|:---------------|:--------------------|:---------------|:--------------------|:-------|:-------------------|:--------------|:----------|:-----------------|:----------|:---------------|:----------------|:-------------|:-----------------------------|:--------|:-------------|:------------|:--------------------|:-------------|:-----------------|:-------------|:--------|:------------|:--------------|:--------------|:---------------|:-------------|:--------------|:----------------|:-----------|:-------------|:--------|:-----------------|:----------|:-----------------|:--------|:--------|:---------| | 0 | 5 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | 1 | 17 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | X | X | X | X | X | | X | X | X | X | X | | | | X | X | | X | | X | X | X | | X | X | X | X | X | X | X | X | | | | | | | | | 2 | 5 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | | X | | X | X | X | X | | | | | X | | | | | X | | | | | | | X | | | | | | | | | | X | X | X | X | | | | | 3 | 25 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | | X | | | | | X | | | | | | | | | | | | | | | | | X | | | X | | | | | | | | | | | X | | | | 4 | 6 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | | X | | | | | X | | | | | | | | | | | | | | X | | | X | | | | | | | | | | | | | | X | X | | | 5 | 5 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | | | X | | | | | | | | | | | | | | | X | | | | X | | | X | | | | | | | | | | | | X | | X | X | X |
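The IMG+TXT packages in the table above ship images with sidecar tag files rather than waifuc metadata. A minimal sketch for pairing them after extracting one of those zips; the exact layout inside the archive is an assumption (flat directory, one .txt per image):

```python
# A minimal sketch, assuming a flat extracted directory where every image
# has a same-named .txt file holding its comma-separated tags.
import os

dataset_dir = 'dataset_dir'  # directory an IMG+TXT zip was extracted into
for name in sorted(os.listdir(dataset_dir)):
    stem, ext = os.path.splitext(name)
    if ext.lower() not in {'.png', '.jpg', '.jpeg', '.webp'}:
        continue
    txt_path = os.path.join(dataset_dir, stem + '.txt')
    if os.path.exists(txt_path):
        with open(txt_path, encoding='utf-8') as f:
            tags = f.read().strip()
        print(name, '->', tags)
```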
CyberHarem/watatsuki_no_toyohime_touhou
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-18T16:55:08+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-15T03:20:38+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of watatsuki\_no\_toyohime/綿月豊姫 (Touhou) ================================================ This is the dataset of watatsuki\_no\_toyohime/綿月豊姫 (Touhou), containing 265 images and their tags. The core tags of this character are 'blonde\_hair, long\_hair, hat, yellow\_eyes, ribbon, bow, hat\_ribbon, breasts, white\_headwear', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide the raw dataset (including tagged images) for waifuc loading. If you need it, just run the following code List of Clusters ---------------- List of tag clustering results; some outfits may be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ 44, 61, 5, 4 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.### Raw Text Version### Table Version" ]
aaa68b254c6145cc509e830b93314dd428165fc4
# Dataset of usami_sumireko/宇佐見菫子 (Touhou) This is the dataset of usami_sumireko/宇佐見菫子 (Touhou), containing 500 images and their tags. The core tags of this character are `brown_hair, glasses, brown_eyes, red-framed_eyewear, hat, twintails, bow, low_twintails, short_hair, hat_bow, semi-rimless_eyewear, under-rim_eyewear, black_headwear, bangs`, which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)). ## List of Packages | Name | Images | Size | Download | Type | Description | |:-----------------|---------:|:-----------|:-----------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------| | raw | 500 | 563.85 MiB | [Download](https://huggingface.co/datasets/CyberHarem/usami_sumireko_touhou/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). | | 800 | 500 | 360.44 MiB | [Download](https://huggingface.co/datasets/CyberHarem/usami_sumireko_touhou/resolve/main/dataset-800.zip) | IMG+TXT | Dataset with the shorter side not exceeding 800 pixels. | | stage3-p480-800 | 1103 | 720.48 MiB | [Download](https://huggingface.co/datasets/CyberHarem/usami_sumireko_touhou/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | | 1200 | 500 | 512.94 MiB | [Download](https://huggingface.co/datasets/CyberHarem/usami_sumireko_touhou/resolve/main/dataset-1200.zip) | IMG+TXT | Dataset with the shorter side not exceeding 1200 pixels. | | stage3-p480-1200 | 1103 | 955.34 MiB | [Download](https://huggingface.co/datasets/CyberHarem/usami_sumireko_touhou/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | ### Load Raw Dataset with Waifuc We provide the raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need it, just run the following code ```python import os import zipfile from huggingface_hub import hf_hub_download from waifuc.source import LocalSource # download raw archive file zip_file = hf_hub_download( repo_id='CyberHarem/usami_sumireko_touhou', repo_type='dataset', filename='dataset-raw.zip', ) # extract files to your directory dataset_dir = 'dataset_dir' os.makedirs(dataset_dir, exist_ok=True) with zipfile.ZipFile(zip_file, 'r') as zf: zf.extractall(dataset_dir) # load the dataset with waifuc source = LocalSource(dataset_dir) for item in source: print(item.image, item.meta['filename'], item.meta['tags']) ``` ## List of Clusters List of tag clustering results; some outfits may be mined here.
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 14 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, nipples, solo, blush, looking_at_viewer, large_breasts, sweat, navel, open_mouth, smile, no_bra, open_shirt, plaid, simple_background, skirt, underwear | | 1 | 12 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, cape, clothes_writing, plaid, skirt, smile, solo, shirt, long_sleeves, open_mouth, school_uniform, looking_at_viewer, gloves | | 2 | 16 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, long_sleeves, plaid_skirt, plaid_vest, solo, purple_skirt, smile, looking_at_viewer, purple_vest, shoes, white_socks, full_body, kneehighs, cloak, runes, white_gloves, white_shirt, black_footwear, clothes_writing, black_cape, closed_mouth, open_mouth, white_bow, card | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | nipples | solo | blush | looking_at_viewer | large_breasts | sweat | navel | open_mouth | smile | no_bra | open_shirt | plaid | simple_background | skirt | underwear | cape | clothes_writing | shirt | long_sleeves | school_uniform | gloves | plaid_skirt | plaid_vest | purple_skirt | purple_vest | shoes | white_socks | full_body | kneehighs | cloak | runes | white_gloves | white_shirt | black_footwear | black_cape | closed_mouth | white_bow | card | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:----------|:-------|:--------|:--------------------|:----------------|:--------|:--------|:-------------|:--------|:---------|:-------------|:--------|:--------------------|:--------|:------------|:-------|:------------------|:--------|:---------------|:-----------------|:---------|:--------------|:-------------|:---------------|:--------------|:--------|:--------------|:------------|:------------|:--------|:--------|:---------------|:--------------|:-----------------|:-------------|:---------------|:------------|:-------| | 0 | 14 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | 1 | 12 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | | X | | X | | | | X | X | | | X | | X | | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | 2 | 16 | ![](samples/2/clu2-sample0.png) | 
![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | | X | | X | | | | X | X | | | | | | | | X | | X | | | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X |
CyberHarem/usami_sumireko_touhou
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-18T17:00:52+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-14T23:20:22+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of usami\_sumireko/宇佐見菫子 (Touhou) ========================================= This is the dataset of usami\_sumireko/宇佐見菫子 (Touhou), containing 500 images and their tags. The core tags of this character are 'brown\_hair, glasses, brown\_eyes, red-framed\_eyewear, hat, twintails, bow, low\_twintails, short\_hair, hat\_bow, semi-rimless\_eyewear, under-rim\_eyewear, black\_headwear, bangs', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by DeepGHS Team(huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide the raw dataset (including tagged images) for waifuc loading. If you need it, just run the following code List of Clusters ---------------- List of tag clustering results; some outfits may be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ 44, 61, 5, 4 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.### Raw Text Version### Table Version" ]
d1bf64f7d7961f81f41e9fa8374b30baa9274eb1
# Dataset Card for "Spirit_BERT_Finetuned" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
EgilKarlsen/Spirit_BERT_Finetuned
[ "region:us" ]
2023-08-18T17:06:57+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "0", "dtype": "float32"}, {"name": "1", "dtype": "float32"}, {"name": "2", "dtype": "float32"}, {"name": "3", "dtype": "float32"}, {"name": "4", "dtype": "float32"}, {"name": "5", "dtype": "float32"}, {"name": "6", "dtype": "float32"}, {"name": "7", "dtype": "float32"}, {"name": "8", "dtype": "float32"}, {"name": "9", "dtype": "float32"}, {"name": "10", "dtype": "float32"}, {"name": "11", "dtype": "float32"}, {"name": "12", "dtype": "float32"}, {"name": "13", "dtype": "float32"}, {"name": "14", "dtype": "float32"}, {"name": "15", "dtype": "float32"}, {"name": "16", "dtype": "float32"}, {"name": "17", "dtype": "float32"}, {"name": "18", "dtype": "float32"}, {"name": "19", "dtype": "float32"}, {"name": "20", "dtype": "float32"}, {"name": "21", "dtype": "float32"}, {"name": "22", "dtype": "float32"}, {"name": "23", "dtype": "float32"}, {"name": "24", "dtype": "float32"}, {"name": "25", "dtype": "float32"}, {"name": "26", "dtype": "float32"}, {"name": "27", "dtype": "float32"}, {"name": "28", "dtype": "float32"}, {"name": "29", "dtype": "float32"}, {"name": "30", "dtype": "float32"}, {"name": "31", "dtype": "float32"}, {"name": "32", "dtype": "float32"}, {"name": "33", "dtype": "float32"}, {"name": "34", "dtype": "float32"}, {"name": "35", "dtype": "float32"}, {"name": "36", "dtype": "float32"}, {"name": "37", "dtype": "float32"}, {"name": "38", "dtype": "float32"}, {"name": "39", "dtype": "float32"}, {"name": "40", "dtype": "float32"}, {"name": "41", "dtype": "float32"}, {"name": "42", "dtype": "float32"}, {"name": "43", "dtype": "float32"}, {"name": "44", "dtype": "float32"}, {"name": "45", "dtype": "float32"}, {"name": "46", "dtype": "float32"}, {"name": "47", "dtype": "float32"}, {"name": "48", "dtype": "float32"}, {"name": "49", "dtype": "float32"}, {"name": "50", "dtype": "float32"}, {"name": "51", "dtype": "float32"}, {"name": "52", "dtype": "float32"}, {"name": "53", "dtype": "float32"}, {"name": "54", "dtype": "float32"}, {"name": "55", "dtype": "float32"}, {"name": "56", "dtype": "float32"}, {"name": "57", "dtype": "float32"}, {"name": "58", "dtype": "float32"}, {"name": "59", "dtype": "float32"}, {"name": "60", "dtype": "float32"}, {"name": "61", "dtype": "float32"}, {"name": "62", "dtype": "float32"}, {"name": "63", "dtype": "float32"}, {"name": "64", "dtype": "float32"}, {"name": "65", "dtype": "float32"}, {"name": "66", "dtype": "float32"}, {"name": "67", "dtype": "float32"}, {"name": "68", "dtype": "float32"}, {"name": "69", "dtype": "float32"}, {"name": "70", "dtype": "float32"}, {"name": "71", "dtype": "float32"}, {"name": "72", "dtype": "float32"}, {"name": "73", "dtype": "float32"}, {"name": "74", "dtype": "float32"}, {"name": "75", "dtype": "float32"}, {"name": "76", "dtype": "float32"}, {"name": "77", "dtype": "float32"}, {"name": "78", "dtype": "float32"}, {"name": "79", "dtype": "float32"}, {"name": "80", "dtype": "float32"}, {"name": "81", "dtype": "float32"}, {"name": "82", "dtype": "float32"}, {"name": "83", "dtype": "float32"}, {"name": "84", "dtype": "float32"}, {"name": "85", "dtype": "float32"}, {"name": "86", "dtype": "float32"}, {"name": "87", "dtype": "float32"}, {"name": "88", "dtype": "float32"}, {"name": "89", "dtype": "float32"}, {"name": "90", "dtype": "float32"}, {"name": "91", "dtype": "float32"}, {"name": "92", "dtype": "float32"}, {"name": "93", "dtype": "float32"}, 
{"name": "94", "dtype": "float32"}, {"name": "95", "dtype": "float32"}, {"name": "96", "dtype": "float32"}, {"name": "97", "dtype": "float32"}, {"name": "98", "dtype": "float32"}, {"name": "99", "dtype": "float32"}, {"name": "100", "dtype": "float32"}, {"name": "101", "dtype": "float32"}, {"name": "102", "dtype": "float32"}, {"name": "103", "dtype": "float32"}, {"name": "104", "dtype": "float32"}, {"name": "105", "dtype": "float32"}, {"name": "106", "dtype": "float32"}, {"name": "107", "dtype": "float32"}, {"name": "108", "dtype": "float32"}, {"name": "109", "dtype": "float32"}, {"name": "110", "dtype": "float32"}, {"name": "111", "dtype": "float32"}, {"name": "112", "dtype": "float32"}, {"name": "113", "dtype": "float32"}, {"name": "114", "dtype": "float32"}, {"name": "115", "dtype": "float32"}, {"name": "116", "dtype": "float32"}, {"name": "117", "dtype": "float32"}, {"name": "118", "dtype": "float32"}, {"name": "119", "dtype": "float32"}, {"name": "120", "dtype": "float32"}, {"name": "121", "dtype": "float32"}, {"name": "122", "dtype": "float32"}, {"name": "123", "dtype": "float32"}, {"name": "124", "dtype": "float32"}, {"name": "125", "dtype": "float32"}, {"name": "126", "dtype": "float32"}, {"name": "127", "dtype": "float32"}, {"name": "128", "dtype": "float32"}, {"name": "129", "dtype": "float32"}, {"name": "130", "dtype": "float32"}, {"name": "131", "dtype": "float32"}, {"name": "132", "dtype": "float32"}, {"name": "133", "dtype": "float32"}, {"name": "134", "dtype": "float32"}, {"name": "135", "dtype": "float32"}, {"name": "136", "dtype": "float32"}, {"name": "137", "dtype": "float32"}, {"name": "138", "dtype": "float32"}, {"name": "139", "dtype": "float32"}, {"name": "140", "dtype": "float32"}, {"name": "141", "dtype": "float32"}, {"name": "142", "dtype": "float32"}, {"name": "143", "dtype": "float32"}, {"name": "144", "dtype": "float32"}, {"name": "145", "dtype": "float32"}, {"name": "146", "dtype": "float32"}, {"name": "147", "dtype": "float32"}, {"name": "148", "dtype": "float32"}, {"name": "149", "dtype": "float32"}, {"name": "150", "dtype": "float32"}, {"name": "151", "dtype": "float32"}, {"name": "152", "dtype": "float32"}, {"name": "153", "dtype": "float32"}, {"name": "154", "dtype": "float32"}, {"name": "155", "dtype": "float32"}, {"name": "156", "dtype": "float32"}, {"name": "157", "dtype": "float32"}, {"name": "158", "dtype": "float32"}, {"name": "159", "dtype": "float32"}, {"name": "160", "dtype": "float32"}, {"name": "161", "dtype": "float32"}, {"name": "162", "dtype": "float32"}, {"name": "163", "dtype": "float32"}, {"name": "164", "dtype": "float32"}, {"name": "165", "dtype": "float32"}, {"name": "166", "dtype": "float32"}, {"name": "167", "dtype": "float32"}, {"name": "168", "dtype": "float32"}, {"name": "169", "dtype": "float32"}, {"name": "170", "dtype": "float32"}, {"name": "171", "dtype": "float32"}, {"name": "172", "dtype": "float32"}, {"name": "173", "dtype": "float32"}, {"name": "174", "dtype": "float32"}, {"name": "175", "dtype": "float32"}, {"name": "176", "dtype": "float32"}, {"name": "177", "dtype": "float32"}, {"name": "178", "dtype": "float32"}, {"name": "179", "dtype": "float32"}, {"name": "180", "dtype": "float32"}, {"name": "181", "dtype": "float32"}, {"name": "182", "dtype": "float32"}, {"name": "183", "dtype": "float32"}, {"name": "184", "dtype": "float32"}, {"name": "185", "dtype": "float32"}, {"name": "186", "dtype": "float32"}, {"name": "187", "dtype": "float32"}, {"name": "188", "dtype": "float32"}, {"name": "189", "dtype": "float32"}, {"name": 
"190", "dtype": "float32"}, {"name": "191", "dtype": "float32"}, {"name": "192", "dtype": "float32"}, {"name": "193", "dtype": "float32"}, {"name": "194", "dtype": "float32"}, {"name": "195", "dtype": "float32"}, {"name": "196", "dtype": "float32"}, {"name": "197", "dtype": "float32"}, {"name": "198", "dtype": "float32"}, {"name": "199", "dtype": "float32"}, {"name": "200", "dtype": "float32"}, {"name": "201", "dtype": "float32"}, {"name": "202", "dtype": "float32"}, {"name": "203", "dtype": "float32"}, {"name": "204", "dtype": "float32"}, {"name": "205", "dtype": "float32"}, {"name": "206", "dtype": "float32"}, {"name": "207", "dtype": "float32"}, {"name": "208", "dtype": "float32"}, {"name": "209", "dtype": "float32"}, {"name": "210", "dtype": "float32"}, {"name": "211", "dtype": "float32"}, {"name": "212", "dtype": "float32"}, {"name": "213", "dtype": "float32"}, {"name": "214", "dtype": "float32"}, {"name": "215", "dtype": "float32"}, {"name": "216", "dtype": "float32"}, {"name": "217", "dtype": "float32"}, {"name": "218", "dtype": "float32"}, {"name": "219", "dtype": "float32"}, {"name": "220", "dtype": "float32"}, {"name": "221", "dtype": "float32"}, {"name": "222", "dtype": "float32"}, {"name": "223", "dtype": "float32"}, {"name": "224", "dtype": "float32"}, {"name": "225", "dtype": "float32"}, {"name": "226", "dtype": "float32"}, {"name": "227", "dtype": "float32"}, {"name": "228", "dtype": "float32"}, {"name": "229", "dtype": "float32"}, {"name": "230", "dtype": "float32"}, {"name": "231", "dtype": "float32"}, {"name": "232", "dtype": "float32"}, {"name": "233", "dtype": "float32"}, {"name": "234", "dtype": "float32"}, {"name": "235", "dtype": "float32"}, {"name": "236", "dtype": "float32"}, {"name": "237", "dtype": "float32"}, {"name": "238", "dtype": "float32"}, {"name": "239", "dtype": "float32"}, {"name": "240", "dtype": "float32"}, {"name": "241", "dtype": "float32"}, {"name": "242", "dtype": "float32"}, {"name": "243", "dtype": "float32"}, {"name": "244", "dtype": "float32"}, {"name": "245", "dtype": "float32"}, {"name": "246", "dtype": "float32"}, {"name": "247", "dtype": "float32"}, {"name": "248", "dtype": "float32"}, {"name": "249", "dtype": "float32"}, {"name": "250", "dtype": "float32"}, {"name": "251", "dtype": "float32"}, {"name": "252", "dtype": "float32"}, {"name": "253", "dtype": "float32"}, {"name": "254", "dtype": "float32"}, {"name": "255", "dtype": "float32"}, {"name": "256", "dtype": "float32"}, {"name": "257", "dtype": "float32"}, {"name": "258", "dtype": "float32"}, {"name": "259", "dtype": "float32"}, {"name": "260", "dtype": "float32"}, {"name": "261", "dtype": "float32"}, {"name": "262", "dtype": "float32"}, {"name": "263", "dtype": "float32"}, {"name": "264", "dtype": "float32"}, {"name": "265", "dtype": "float32"}, {"name": "266", "dtype": "float32"}, {"name": "267", "dtype": "float32"}, {"name": "268", "dtype": "float32"}, {"name": "269", "dtype": "float32"}, {"name": "270", "dtype": "float32"}, {"name": "271", "dtype": "float32"}, {"name": "272", "dtype": "float32"}, {"name": "273", "dtype": "float32"}, {"name": "274", "dtype": "float32"}, {"name": "275", "dtype": "float32"}, {"name": "276", "dtype": "float32"}, {"name": "277", "dtype": "float32"}, {"name": "278", "dtype": "float32"}, {"name": "279", "dtype": "float32"}, {"name": "280", "dtype": "float32"}, {"name": "281", "dtype": "float32"}, {"name": "282", "dtype": "float32"}, {"name": "283", "dtype": "float32"}, {"name": "284", "dtype": "float32"}, {"name": "285", "dtype": "float32"}, {"name": 
"286", "dtype": "float32"}, {"name": "287", "dtype": "float32"}, {"name": "288", "dtype": "float32"}, {"name": "289", "dtype": "float32"}, {"name": "290", "dtype": "float32"}, {"name": "291", "dtype": "float32"}, {"name": "292", "dtype": "float32"}, {"name": "293", "dtype": "float32"}, {"name": "294", "dtype": "float32"}, {"name": "295", "dtype": "float32"}, {"name": "296", "dtype": "float32"}, {"name": "297", "dtype": "float32"}, {"name": "298", "dtype": "float32"}, {"name": "299", "dtype": "float32"}, {"name": "300", "dtype": "float32"}, {"name": "301", "dtype": "float32"}, {"name": "302", "dtype": "float32"}, {"name": "303", "dtype": "float32"}, {"name": "304", "dtype": "float32"}, {"name": "305", "dtype": "float32"}, {"name": "306", "dtype": "float32"}, {"name": "307", "dtype": "float32"}, {"name": "308", "dtype": "float32"}, {"name": "309", "dtype": "float32"}, {"name": "310", "dtype": "float32"}, {"name": "311", "dtype": "float32"}, {"name": "312", "dtype": "float32"}, {"name": "313", "dtype": "float32"}, {"name": "314", "dtype": "float32"}, {"name": "315", "dtype": "float32"}, {"name": "316", "dtype": "float32"}, {"name": "317", "dtype": "float32"}, {"name": "318", "dtype": "float32"}, {"name": "319", "dtype": "float32"}, {"name": "320", "dtype": "float32"}, {"name": "321", "dtype": "float32"}, {"name": "322", "dtype": "float32"}, {"name": "323", "dtype": "float32"}, {"name": "324", "dtype": "float32"}, {"name": "325", "dtype": "float32"}, {"name": "326", "dtype": "float32"}, {"name": "327", "dtype": "float32"}, {"name": "328", "dtype": "float32"}, {"name": "329", "dtype": "float32"}, {"name": "330", "dtype": "float32"}, {"name": "331", "dtype": "float32"}, {"name": "332", "dtype": "float32"}, {"name": "333", "dtype": "float32"}, {"name": "334", "dtype": "float32"}, {"name": "335", "dtype": "float32"}, {"name": "336", "dtype": "float32"}, {"name": "337", "dtype": "float32"}, {"name": "338", "dtype": "float32"}, {"name": "339", "dtype": "float32"}, {"name": "340", "dtype": "float32"}, {"name": "341", "dtype": "float32"}, {"name": "342", "dtype": "float32"}, {"name": "343", "dtype": "float32"}, {"name": "344", "dtype": "float32"}, {"name": "345", "dtype": "float32"}, {"name": "346", "dtype": "float32"}, {"name": "347", "dtype": "float32"}, {"name": "348", "dtype": "float32"}, {"name": "349", "dtype": "float32"}, {"name": "350", "dtype": "float32"}, {"name": "351", "dtype": "float32"}, {"name": "352", "dtype": "float32"}, {"name": "353", "dtype": "float32"}, {"name": "354", "dtype": "float32"}, {"name": "355", "dtype": "float32"}, {"name": "356", "dtype": "float32"}, {"name": "357", "dtype": "float32"}, {"name": "358", "dtype": "float32"}, {"name": "359", "dtype": "float32"}, {"name": "360", "dtype": "float32"}, {"name": "361", "dtype": "float32"}, {"name": "362", "dtype": "float32"}, {"name": "363", "dtype": "float32"}, {"name": "364", "dtype": "float32"}, {"name": "365", "dtype": "float32"}, {"name": "366", "dtype": "float32"}, {"name": "367", "dtype": "float32"}, {"name": "368", "dtype": "float32"}, {"name": "369", "dtype": "float32"}, {"name": "370", "dtype": "float32"}, {"name": "371", "dtype": "float32"}, {"name": "372", "dtype": "float32"}, {"name": "373", "dtype": "float32"}, {"name": "374", "dtype": "float32"}, {"name": "375", "dtype": "float32"}, {"name": "376", "dtype": "float32"}, {"name": "377", "dtype": "float32"}, {"name": "378", "dtype": "float32"}, {"name": "379", "dtype": "float32"}, {"name": "380", "dtype": "float32"}, {"name": "381", "dtype": "float32"}, {"name": 
"382", "dtype": "float32"}, {"name": "383", "dtype": "float32"}, {"name": "384", "dtype": "float32"}, {"name": "385", "dtype": "float32"}, {"name": "386", "dtype": "float32"}, {"name": "387", "dtype": "float32"}, {"name": "388", "dtype": "float32"}, {"name": "389", "dtype": "float32"}, {"name": "390", "dtype": "float32"}, {"name": "391", "dtype": "float32"}, {"name": "392", "dtype": "float32"}, {"name": "393", "dtype": "float32"}, {"name": "394", "dtype": "float32"}, {"name": "395", "dtype": "float32"}, {"name": "396", "dtype": "float32"}, {"name": "397", "dtype": "float32"}, {"name": "398", "dtype": "float32"}, {"name": "399", "dtype": "float32"}, {"name": "400", "dtype": "float32"}, {"name": "401", "dtype": "float32"}, {"name": "402", "dtype": "float32"}, {"name": "403", "dtype": "float32"}, {"name": "404", "dtype": "float32"}, {"name": "405", "dtype": "float32"}, {"name": "406", "dtype": "float32"}, {"name": "407", "dtype": "float32"}, {"name": "408", "dtype": "float32"}, {"name": "409", "dtype": "float32"}, {"name": "410", "dtype": "float32"}, {"name": "411", "dtype": "float32"}, {"name": "412", "dtype": "float32"}, {"name": "413", "dtype": "float32"}, {"name": "414", "dtype": "float32"}, {"name": "415", "dtype": "float32"}, {"name": "416", "dtype": "float32"}, {"name": "417", "dtype": "float32"}, {"name": "418", "dtype": "float32"}, {"name": "419", "dtype": "float32"}, {"name": "420", "dtype": "float32"}, {"name": "421", "dtype": "float32"}, {"name": "422", "dtype": "float32"}, {"name": "423", "dtype": "float32"}, {"name": "424", "dtype": "float32"}, {"name": "425", "dtype": "float32"}, {"name": "426", "dtype": "float32"}, {"name": "427", "dtype": "float32"}, {"name": "428", "dtype": "float32"}, {"name": "429", "dtype": "float32"}, {"name": "430", "dtype": "float32"}, {"name": "431", "dtype": "float32"}, {"name": "432", "dtype": "float32"}, {"name": "433", "dtype": "float32"}, {"name": "434", "dtype": "float32"}, {"name": "435", "dtype": "float32"}, {"name": "436", "dtype": "float32"}, {"name": "437", "dtype": "float32"}, {"name": "438", "dtype": "float32"}, {"name": "439", "dtype": "float32"}, {"name": "440", "dtype": "float32"}, {"name": "441", "dtype": "float32"}, {"name": "442", "dtype": "float32"}, {"name": "443", "dtype": "float32"}, {"name": "444", "dtype": "float32"}, {"name": "445", "dtype": "float32"}, {"name": "446", "dtype": "float32"}, {"name": "447", "dtype": "float32"}, {"name": "448", "dtype": "float32"}, {"name": "449", "dtype": "float32"}, {"name": "450", "dtype": "float32"}, {"name": "451", "dtype": "float32"}, {"name": "452", "dtype": "float32"}, {"name": "453", "dtype": "float32"}, {"name": "454", "dtype": "float32"}, {"name": "455", "dtype": "float32"}, {"name": "456", "dtype": "float32"}, {"name": "457", "dtype": "float32"}, {"name": "458", "dtype": "float32"}, {"name": "459", "dtype": "float32"}, {"name": "460", "dtype": "float32"}, {"name": "461", "dtype": "float32"}, {"name": "462", "dtype": "float32"}, {"name": "463", "dtype": "float32"}, {"name": "464", "dtype": "float32"}, {"name": "465", "dtype": "float32"}, {"name": "466", "dtype": "float32"}, {"name": "467", "dtype": "float32"}, {"name": "468", "dtype": "float32"}, {"name": "469", "dtype": "float32"}, {"name": "470", "dtype": "float32"}, {"name": "471", "dtype": "float32"}, {"name": "472", "dtype": "float32"}, {"name": "473", "dtype": "float32"}, {"name": "474", "dtype": "float32"}, {"name": "475", "dtype": "float32"}, {"name": "476", "dtype": "float32"}, {"name": "477", "dtype": "float32"}, {"name": 
"478", "dtype": "float32"}, {"name": "479", "dtype": "float32"}, {"name": "480", "dtype": "float32"}, {"name": "481", "dtype": "float32"}, {"name": "482", "dtype": "float32"}, {"name": "483", "dtype": "float32"}, {"name": "484", "dtype": "float32"}, {"name": "485", "dtype": "float32"}, {"name": "486", "dtype": "float32"}, {"name": "487", "dtype": "float32"}, {"name": "488", "dtype": "float32"}, {"name": "489", "dtype": "float32"}, {"name": "490", "dtype": "float32"}, {"name": "491", "dtype": "float32"}, {"name": "492", "dtype": "float32"}, {"name": "493", "dtype": "float32"}, {"name": "494", "dtype": "float32"}, {"name": "495", "dtype": "float32"}, {"name": "496", "dtype": "float32"}, {"name": "497", "dtype": "float32"}, {"name": "498", "dtype": "float32"}, {"name": "499", "dtype": "float32"}, {"name": "500", "dtype": "float32"}, {"name": "501", "dtype": "float32"}, {"name": "502", "dtype": "float32"}, {"name": "503", "dtype": "float32"}, {"name": "504", "dtype": "float32"}, {"name": "505", "dtype": "float32"}, {"name": "506", "dtype": "float32"}, {"name": "507", "dtype": "float32"}, {"name": "508", "dtype": "float32"}, {"name": "509", "dtype": "float32"}, {"name": "510", "dtype": "float32"}, {"name": "511", "dtype": "float32"}, {"name": "512", "dtype": "float32"}, {"name": "513", "dtype": "float32"}, {"name": "514", "dtype": "float32"}, {"name": "515", "dtype": "float32"}, {"name": "516", "dtype": "float32"}, {"name": "517", "dtype": "float32"}, {"name": "518", "dtype": "float32"}, {"name": "519", "dtype": "float32"}, {"name": "520", "dtype": "float32"}, {"name": "521", "dtype": "float32"}, {"name": "522", "dtype": "float32"}, {"name": "523", "dtype": "float32"}, {"name": "524", "dtype": "float32"}, {"name": "525", "dtype": "float32"}, {"name": "526", "dtype": "float32"}, {"name": "527", "dtype": "float32"}, {"name": "528", "dtype": "float32"}, {"name": "529", "dtype": "float32"}, {"name": "530", "dtype": "float32"}, {"name": "531", "dtype": "float32"}, {"name": "532", "dtype": "float32"}, {"name": "533", "dtype": "float32"}, {"name": "534", "dtype": "float32"}, {"name": "535", "dtype": "float32"}, {"name": "536", "dtype": "float32"}, {"name": "537", "dtype": "float32"}, {"name": "538", "dtype": "float32"}, {"name": "539", "dtype": "float32"}, {"name": "540", "dtype": "float32"}, {"name": "541", "dtype": "float32"}, {"name": "542", "dtype": "float32"}, {"name": "543", "dtype": "float32"}, {"name": "544", "dtype": "float32"}, {"name": "545", "dtype": "float32"}, {"name": "546", "dtype": "float32"}, {"name": "547", "dtype": "float32"}, {"name": "548", "dtype": "float32"}, {"name": "549", "dtype": "float32"}, {"name": "550", "dtype": "float32"}, {"name": "551", "dtype": "float32"}, {"name": "552", "dtype": "float32"}, {"name": "553", "dtype": "float32"}, {"name": "554", "dtype": "float32"}, {"name": "555", "dtype": "float32"}, {"name": "556", "dtype": "float32"}, {"name": "557", "dtype": "float32"}, {"name": "558", "dtype": "float32"}, {"name": "559", "dtype": "float32"}, {"name": "560", "dtype": "float32"}, {"name": "561", "dtype": "float32"}, {"name": "562", "dtype": "float32"}, {"name": "563", "dtype": "float32"}, {"name": "564", "dtype": "float32"}, {"name": "565", "dtype": "float32"}, {"name": "566", "dtype": "float32"}, {"name": "567", "dtype": "float32"}, {"name": "568", "dtype": "float32"}, {"name": "569", "dtype": "float32"}, {"name": "570", "dtype": "float32"}, {"name": "571", "dtype": "float32"}, {"name": "572", "dtype": "float32"}, {"name": "573", "dtype": "float32"}, {"name": 
"574", "dtype": "float32"}, {"name": "575", "dtype": "float32"}, {"name": "576", "dtype": "float32"}, {"name": "577", "dtype": "float32"}, {"name": "578", "dtype": "float32"}, {"name": "579", "dtype": "float32"}, {"name": "580", "dtype": "float32"}, {"name": "581", "dtype": "float32"}, {"name": "582", "dtype": "float32"}, {"name": "583", "dtype": "float32"}, {"name": "584", "dtype": "float32"}, {"name": "585", "dtype": "float32"}, {"name": "586", "dtype": "float32"}, {"name": "587", "dtype": "float32"}, {"name": "588", "dtype": "float32"}, {"name": "589", "dtype": "float32"}, {"name": "590", "dtype": "float32"}, {"name": "591", "dtype": "float32"}, {"name": "592", "dtype": "float32"}, {"name": "593", "dtype": "float32"}, {"name": "594", "dtype": "float32"}, {"name": "595", "dtype": "float32"}, {"name": "596", "dtype": "float32"}, {"name": "597", "dtype": "float32"}, {"name": "598", "dtype": "float32"}, {"name": "599", "dtype": "float32"}, {"name": "600", "dtype": "float32"}, {"name": "601", "dtype": "float32"}, {"name": "602", "dtype": "float32"}, {"name": "603", "dtype": "float32"}, {"name": "604", "dtype": "float32"}, {"name": "605", "dtype": "float32"}, {"name": "606", "dtype": "float32"}, {"name": "607", "dtype": "float32"}, {"name": "608", "dtype": "float32"}, {"name": "609", "dtype": "float32"}, {"name": "610", "dtype": "float32"}, {"name": "611", "dtype": "float32"}, {"name": "612", "dtype": "float32"}, {"name": "613", "dtype": "float32"}, {"name": "614", "dtype": "float32"}, {"name": "615", "dtype": "float32"}, {"name": "616", "dtype": "float32"}, {"name": "617", "dtype": "float32"}, {"name": "618", "dtype": "float32"}, {"name": "619", "dtype": "float32"}, {"name": "620", "dtype": "float32"}, {"name": "621", "dtype": "float32"}, {"name": "622", "dtype": "float32"}, {"name": "623", "dtype": "float32"}, {"name": "624", "dtype": "float32"}, {"name": "625", "dtype": "float32"}, {"name": "626", "dtype": "float32"}, {"name": "627", "dtype": "float32"}, {"name": "628", "dtype": "float32"}, {"name": "629", "dtype": "float32"}, {"name": "630", "dtype": "float32"}, {"name": "631", "dtype": "float32"}, {"name": "632", "dtype": "float32"}, {"name": "633", "dtype": "float32"}, {"name": "634", "dtype": "float32"}, {"name": "635", "dtype": "float32"}, {"name": "636", "dtype": "float32"}, {"name": "637", "dtype": "float32"}, {"name": "638", "dtype": "float32"}, {"name": "639", "dtype": "float32"}, {"name": "640", "dtype": "float32"}, {"name": "641", "dtype": "float32"}, {"name": "642", "dtype": "float32"}, {"name": "643", "dtype": "float32"}, {"name": "644", "dtype": "float32"}, {"name": "645", "dtype": "float32"}, {"name": "646", "dtype": "float32"}, {"name": "647", "dtype": "float32"}, {"name": "648", "dtype": "float32"}, {"name": "649", "dtype": "float32"}, {"name": "650", "dtype": "float32"}, {"name": "651", "dtype": "float32"}, {"name": "652", "dtype": "float32"}, {"name": "653", "dtype": "float32"}, {"name": "654", "dtype": "float32"}, {"name": "655", "dtype": "float32"}, {"name": "656", "dtype": "float32"}, {"name": "657", "dtype": "float32"}, {"name": "658", "dtype": "float32"}, {"name": "659", "dtype": "float32"}, {"name": "660", "dtype": "float32"}, {"name": "661", "dtype": "float32"}, {"name": "662", "dtype": "float32"}, {"name": "663", "dtype": "float32"}, {"name": "664", "dtype": "float32"}, {"name": "665", "dtype": "float32"}, {"name": "666", "dtype": "float32"}, {"name": "667", "dtype": "float32"}, {"name": "668", "dtype": "float32"}, {"name": "669", "dtype": "float32"}, {"name": 
"670", "dtype": "float32"}, {"name": "671", "dtype": "float32"}, {"name": "672", "dtype": "float32"}, {"name": "673", "dtype": "float32"}, {"name": "674", "dtype": "float32"}, {"name": "675", "dtype": "float32"}, {"name": "676", "dtype": "float32"}, {"name": "677", "dtype": "float32"}, {"name": "678", "dtype": "float32"}, {"name": "679", "dtype": "float32"}, {"name": "680", "dtype": "float32"}, {"name": "681", "dtype": "float32"}, {"name": "682", "dtype": "float32"}, {"name": "683", "dtype": "float32"}, {"name": "684", "dtype": "float32"}, {"name": "685", "dtype": "float32"}, {"name": "686", "dtype": "float32"}, {"name": "687", "dtype": "float32"}, {"name": "688", "dtype": "float32"}, {"name": "689", "dtype": "float32"}, {"name": "690", "dtype": "float32"}, {"name": "691", "dtype": "float32"}, {"name": "692", "dtype": "float32"}, {"name": "693", "dtype": "float32"}, {"name": "694", "dtype": "float32"}, {"name": "695", "dtype": "float32"}, {"name": "696", "dtype": "float32"}, {"name": "697", "dtype": "float32"}, {"name": "698", "dtype": "float32"}, {"name": "699", "dtype": "float32"}, {"name": "700", "dtype": "float32"}, {"name": "701", "dtype": "float32"}, {"name": "702", "dtype": "float32"}, {"name": "703", "dtype": "float32"}, {"name": "704", "dtype": "float32"}, {"name": "705", "dtype": "float32"}, {"name": "706", "dtype": "float32"}, {"name": "707", "dtype": "float32"}, {"name": "708", "dtype": "float32"}, {"name": "709", "dtype": "float32"}, {"name": "710", "dtype": "float32"}, {"name": "711", "dtype": "float32"}, {"name": "712", "dtype": "float32"}, {"name": "713", "dtype": "float32"}, {"name": "714", "dtype": "float32"}, {"name": "715", "dtype": "float32"}, {"name": "716", "dtype": "float32"}, {"name": "717", "dtype": "float32"}, {"name": "718", "dtype": "float32"}, {"name": "719", "dtype": "float32"}, {"name": "720", "dtype": "float32"}, {"name": "721", "dtype": "float32"}, {"name": "722", "dtype": "float32"}, {"name": "723", "dtype": "float32"}, {"name": "724", "dtype": "float32"}, {"name": "725", "dtype": "float32"}, {"name": "726", "dtype": "float32"}, {"name": "727", "dtype": "float32"}, {"name": "728", "dtype": "float32"}, {"name": "729", "dtype": "float32"}, {"name": "730", "dtype": "float32"}, {"name": "731", "dtype": "float32"}, {"name": "732", "dtype": "float32"}, {"name": "733", "dtype": "float32"}, {"name": "734", "dtype": "float32"}, {"name": "735", "dtype": "float32"}, {"name": "736", "dtype": "float32"}, {"name": "737", "dtype": "float32"}, {"name": "738", "dtype": "float32"}, {"name": "739", "dtype": "float32"}, {"name": "740", "dtype": "float32"}, {"name": "741", "dtype": "float32"}, {"name": "742", "dtype": "float32"}, {"name": "743", "dtype": "float32"}, {"name": "744", "dtype": "float32"}, {"name": "745", "dtype": "float32"}, {"name": "746", "dtype": "float32"}, {"name": "747", "dtype": "float32"}, {"name": "748", "dtype": "float32"}, {"name": "749", "dtype": "float32"}, {"name": "750", "dtype": "float32"}, {"name": "751", "dtype": "float32"}, {"name": "752", "dtype": "float32"}, {"name": "753", "dtype": "float32"}, {"name": "754", "dtype": "float32"}, {"name": "755", "dtype": "float32"}, {"name": "756", "dtype": "float32"}, {"name": "757", "dtype": "float32"}, {"name": "758", "dtype": "float32"}, {"name": "759", "dtype": "float32"}, {"name": "760", "dtype": "float32"}, {"name": "761", "dtype": "float32"}, {"name": "762", "dtype": "float32"}, {"name": "763", "dtype": "float32"}, {"name": "764", "dtype": "float32"}, {"name": "765", "dtype": "float32"}, {"name": 
"766", "dtype": "float32"}, {"name": "767", "dtype": "float32"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 115650065.625, "num_examples": 37500}, {"name": "test", "num_bytes": 38550020.0, "num_examples": 12500}], "download_size": 211763544, "dataset_size": 154200085.625}}
2023-08-23T04:10:06+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Spirit_BERT_Finetuned" More Information needed
[ "# Dataset Card for \"Spirit_BERT_Finetuned\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Spirit_BERT_Finetuned\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"Spirit_BERT_Finetuned\"\n\nMore Information needed" ]
de83c44caae1147bc5b26d198222c2ea9e52350e
# Dataset Card for "Spirit_RoBERTa_Finetuned" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
EgilKarlsen/Spirit_RoBERTa_Finetuned
[ "region:us" ]
2023-08-18T17:13:35+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "0", "dtype": "float32"}, {"name": "1", "dtype": "float32"}, {"name": "2", "dtype": "float32"}, {"name": "3", "dtype": "float32"}, {"name": "4", "dtype": "float32"}, {"name": "5", "dtype": "float32"}, {"name": "6", "dtype": "float32"}, {"name": "7", "dtype": "float32"}, {"name": "8", "dtype": "float32"}, {"name": "9", "dtype": "float32"}, {"name": "10", "dtype": "float32"}, {"name": "11", "dtype": "float32"}, {"name": "12", "dtype": "float32"}, {"name": "13", "dtype": "float32"}, {"name": "14", "dtype": "float32"}, {"name": "15", "dtype": "float32"}, {"name": "16", "dtype": "float32"}, {"name": "17", "dtype": "float32"}, {"name": "18", "dtype": "float32"}, {"name": "19", "dtype": "float32"}, {"name": "20", "dtype": "float32"}, {"name": "21", "dtype": "float32"}, {"name": "22", "dtype": "float32"}, {"name": "23", "dtype": "float32"}, {"name": "24", "dtype": "float32"}, {"name": "25", "dtype": "float32"}, {"name": "26", "dtype": "float32"}, {"name": "27", "dtype": "float32"}, {"name": "28", "dtype": "float32"}, {"name": "29", "dtype": "float32"}, {"name": "30", "dtype": "float32"}, {"name": "31", "dtype": "float32"}, {"name": "32", "dtype": "float32"}, {"name": "33", "dtype": "float32"}, {"name": "34", "dtype": "float32"}, {"name": "35", "dtype": "float32"}, {"name": "36", "dtype": "float32"}, {"name": "37", "dtype": "float32"}, {"name": "38", "dtype": "float32"}, {"name": "39", "dtype": "float32"}, {"name": "40", "dtype": "float32"}, {"name": "41", "dtype": "float32"}, {"name": "42", "dtype": "float32"}, {"name": "43", "dtype": "float32"}, {"name": "44", "dtype": "float32"}, {"name": "45", "dtype": "float32"}, {"name": "46", "dtype": "float32"}, {"name": "47", "dtype": "float32"}, {"name": "48", "dtype": "float32"}, {"name": "49", "dtype": "float32"}, {"name": "50", "dtype": "float32"}, {"name": "51", "dtype": "float32"}, {"name": "52", "dtype": "float32"}, {"name": "53", "dtype": "float32"}, {"name": "54", "dtype": "float32"}, {"name": "55", "dtype": "float32"}, {"name": "56", "dtype": "float32"}, {"name": "57", "dtype": "float32"}, {"name": "58", "dtype": "float32"}, {"name": "59", "dtype": "float32"}, {"name": "60", "dtype": "float32"}, {"name": "61", "dtype": "float32"}, {"name": "62", "dtype": "float32"}, {"name": "63", "dtype": "float32"}, {"name": "64", "dtype": "float32"}, {"name": "65", "dtype": "float32"}, {"name": "66", "dtype": "float32"}, {"name": "67", "dtype": "float32"}, {"name": "68", "dtype": "float32"}, {"name": "69", "dtype": "float32"}, {"name": "70", "dtype": "float32"}, {"name": "71", "dtype": "float32"}, {"name": "72", "dtype": "float32"}, {"name": "73", "dtype": "float32"}, {"name": "74", "dtype": "float32"}, {"name": "75", "dtype": "float32"}, {"name": "76", "dtype": "float32"}, {"name": "77", "dtype": "float32"}, {"name": "78", "dtype": "float32"}, {"name": "79", "dtype": "float32"}, {"name": "80", "dtype": "float32"}, {"name": "81", "dtype": "float32"}, {"name": "82", "dtype": "float32"}, {"name": "83", "dtype": "float32"}, {"name": "84", "dtype": "float32"}, {"name": "85", "dtype": "float32"}, {"name": "86", "dtype": "float32"}, {"name": "87", "dtype": "float32"}, {"name": "88", "dtype": "float32"}, {"name": "89", "dtype": "float32"}, {"name": "90", "dtype": "float32"}, {"name": "91", "dtype": "float32"}, {"name": "92", "dtype": "float32"}, {"name": "93", "dtype": "float32"}, 
{"name": "94", "dtype": "float32"}, {"name": "95", "dtype": "float32"}, {"name": "96", "dtype": "float32"}, {"name": "97", "dtype": "float32"}, {"name": "98", "dtype": "float32"}, {"name": "99", "dtype": "float32"}, {"name": "100", "dtype": "float32"}, {"name": "101", "dtype": "float32"}, {"name": "102", "dtype": "float32"}, {"name": "103", "dtype": "float32"}, {"name": "104", "dtype": "float32"}, {"name": "105", "dtype": "float32"}, {"name": "106", "dtype": "float32"}, {"name": "107", "dtype": "float32"}, {"name": "108", "dtype": "float32"}, {"name": "109", "dtype": "float32"}, {"name": "110", "dtype": "float32"}, {"name": "111", "dtype": "float32"}, {"name": "112", "dtype": "float32"}, {"name": "113", "dtype": "float32"}, {"name": "114", "dtype": "float32"}, {"name": "115", "dtype": "float32"}, {"name": "116", "dtype": "float32"}, {"name": "117", "dtype": "float32"}, {"name": "118", "dtype": "float32"}, {"name": "119", "dtype": "float32"}, {"name": "120", "dtype": "float32"}, {"name": "121", "dtype": "float32"}, {"name": "122", "dtype": "float32"}, {"name": "123", "dtype": "float32"}, {"name": "124", "dtype": "float32"}, {"name": "125", "dtype": "float32"}, {"name": "126", "dtype": "float32"}, {"name": "127", "dtype": "float32"}, {"name": "128", "dtype": "float32"}, {"name": "129", "dtype": "float32"}, {"name": "130", "dtype": "float32"}, {"name": "131", "dtype": "float32"}, {"name": "132", "dtype": "float32"}, {"name": "133", "dtype": "float32"}, {"name": "134", "dtype": "float32"}, {"name": "135", "dtype": "float32"}, {"name": "136", "dtype": "float32"}, {"name": "137", "dtype": "float32"}, {"name": "138", "dtype": "float32"}, {"name": "139", "dtype": "float32"}, {"name": "140", "dtype": "float32"}, {"name": "141", "dtype": "float32"}, {"name": "142", "dtype": "float32"}, {"name": "143", "dtype": "float32"}, {"name": "144", "dtype": "float32"}, {"name": "145", "dtype": "float32"}, {"name": "146", "dtype": "float32"}, {"name": "147", "dtype": "float32"}, {"name": "148", "dtype": "float32"}, {"name": "149", "dtype": "float32"}, {"name": "150", "dtype": "float32"}, {"name": "151", "dtype": "float32"}, {"name": "152", "dtype": "float32"}, {"name": "153", "dtype": "float32"}, {"name": "154", "dtype": "float32"}, {"name": "155", "dtype": "float32"}, {"name": "156", "dtype": "float32"}, {"name": "157", "dtype": "float32"}, {"name": "158", "dtype": "float32"}, {"name": "159", "dtype": "float32"}, {"name": "160", "dtype": "float32"}, {"name": "161", "dtype": "float32"}, {"name": "162", "dtype": "float32"}, {"name": "163", "dtype": "float32"}, {"name": "164", "dtype": "float32"}, {"name": "165", "dtype": "float32"}, {"name": "166", "dtype": "float32"}, {"name": "167", "dtype": "float32"}, {"name": "168", "dtype": "float32"}, {"name": "169", "dtype": "float32"}, {"name": "170", "dtype": "float32"}, {"name": "171", "dtype": "float32"}, {"name": "172", "dtype": "float32"}, {"name": "173", "dtype": "float32"}, {"name": "174", "dtype": "float32"}, {"name": "175", "dtype": "float32"}, {"name": "176", "dtype": "float32"}, {"name": "177", "dtype": "float32"}, {"name": "178", "dtype": "float32"}, {"name": "179", "dtype": "float32"}, {"name": "180", "dtype": "float32"}, {"name": "181", "dtype": "float32"}, {"name": "182", "dtype": "float32"}, {"name": "183", "dtype": "float32"}, {"name": "184", "dtype": "float32"}, {"name": "185", "dtype": "float32"}, {"name": "186", "dtype": "float32"}, {"name": "187", "dtype": "float32"}, {"name": "188", "dtype": "float32"}, {"name": "189", "dtype": "float32"}, {"name": 
"190", "dtype": "float32"}, {"name": "191", "dtype": "float32"}, {"name": "192", "dtype": "float32"}, {"name": "193", "dtype": "float32"}, {"name": "194", "dtype": "float32"}, {"name": "195", "dtype": "float32"}, {"name": "196", "dtype": "float32"}, {"name": "197", "dtype": "float32"}, {"name": "198", "dtype": "float32"}, {"name": "199", "dtype": "float32"}, {"name": "200", "dtype": "float32"}, {"name": "201", "dtype": "float32"}, {"name": "202", "dtype": "float32"}, {"name": "203", "dtype": "float32"}, {"name": "204", "dtype": "float32"}, {"name": "205", "dtype": "float32"}, {"name": "206", "dtype": "float32"}, {"name": "207", "dtype": "float32"}, {"name": "208", "dtype": "float32"}, {"name": "209", "dtype": "float32"}, {"name": "210", "dtype": "float32"}, {"name": "211", "dtype": "float32"}, {"name": "212", "dtype": "float32"}, {"name": "213", "dtype": "float32"}, {"name": "214", "dtype": "float32"}, {"name": "215", "dtype": "float32"}, {"name": "216", "dtype": "float32"}, {"name": "217", "dtype": "float32"}, {"name": "218", "dtype": "float32"}, {"name": "219", "dtype": "float32"}, {"name": "220", "dtype": "float32"}, {"name": "221", "dtype": "float32"}, {"name": "222", "dtype": "float32"}, {"name": "223", "dtype": "float32"}, {"name": "224", "dtype": "float32"}, {"name": "225", "dtype": "float32"}, {"name": "226", "dtype": "float32"}, {"name": "227", "dtype": "float32"}, {"name": "228", "dtype": "float32"}, {"name": "229", "dtype": "float32"}, {"name": "230", "dtype": "float32"}, {"name": "231", "dtype": "float32"}, {"name": "232", "dtype": "float32"}, {"name": "233", "dtype": "float32"}, {"name": "234", "dtype": "float32"}, {"name": "235", "dtype": "float32"}, {"name": "236", "dtype": "float32"}, {"name": "237", "dtype": "float32"}, {"name": "238", "dtype": "float32"}, {"name": "239", "dtype": "float32"}, {"name": "240", "dtype": "float32"}, {"name": "241", "dtype": "float32"}, {"name": "242", "dtype": "float32"}, {"name": "243", "dtype": "float32"}, {"name": "244", "dtype": "float32"}, {"name": "245", "dtype": "float32"}, {"name": "246", "dtype": "float32"}, {"name": "247", "dtype": "float32"}, {"name": "248", "dtype": "float32"}, {"name": "249", "dtype": "float32"}, {"name": "250", "dtype": "float32"}, {"name": "251", "dtype": "float32"}, {"name": "252", "dtype": "float32"}, {"name": "253", "dtype": "float32"}, {"name": "254", "dtype": "float32"}, {"name": "255", "dtype": "float32"}, {"name": "256", "dtype": "float32"}, {"name": "257", "dtype": "float32"}, {"name": "258", "dtype": "float32"}, {"name": "259", "dtype": "float32"}, {"name": "260", "dtype": "float32"}, {"name": "261", "dtype": "float32"}, {"name": "262", "dtype": "float32"}, {"name": "263", "dtype": "float32"}, {"name": "264", "dtype": "float32"}, {"name": "265", "dtype": "float32"}, {"name": "266", "dtype": "float32"}, {"name": "267", "dtype": "float32"}, {"name": "268", "dtype": "float32"}, {"name": "269", "dtype": "float32"}, {"name": "270", "dtype": "float32"}, {"name": "271", "dtype": "float32"}, {"name": "272", "dtype": "float32"}, {"name": "273", "dtype": "float32"}, {"name": "274", "dtype": "float32"}, {"name": "275", "dtype": "float32"}, {"name": "276", "dtype": "float32"}, {"name": "277", "dtype": "float32"}, {"name": "278", "dtype": "float32"}, {"name": "279", "dtype": "float32"}, {"name": "280", "dtype": "float32"}, {"name": "281", "dtype": "float32"}, {"name": "282", "dtype": "float32"}, {"name": "283", "dtype": "float32"}, {"name": "284", "dtype": "float32"}, {"name": "285", "dtype": "float32"}, {"name": 
"286", "dtype": "float32"}, {"name": "287", "dtype": "float32"}, {"name": "288", "dtype": "float32"}, {"name": "289", "dtype": "float32"}, {"name": "290", "dtype": "float32"}, {"name": "291", "dtype": "float32"}, {"name": "292", "dtype": "float32"}, {"name": "293", "dtype": "float32"}, {"name": "294", "dtype": "float32"}, {"name": "295", "dtype": "float32"}, {"name": "296", "dtype": "float32"}, {"name": "297", "dtype": "float32"}, {"name": "298", "dtype": "float32"}, {"name": "299", "dtype": "float32"}, {"name": "300", "dtype": "float32"}, {"name": "301", "dtype": "float32"}, {"name": "302", "dtype": "float32"}, {"name": "303", "dtype": "float32"}, {"name": "304", "dtype": "float32"}, {"name": "305", "dtype": "float32"}, {"name": "306", "dtype": "float32"}, {"name": "307", "dtype": "float32"}, {"name": "308", "dtype": "float32"}, {"name": "309", "dtype": "float32"}, {"name": "310", "dtype": "float32"}, {"name": "311", "dtype": "float32"}, {"name": "312", "dtype": "float32"}, {"name": "313", "dtype": "float32"}, {"name": "314", "dtype": "float32"}, {"name": "315", "dtype": "float32"}, {"name": "316", "dtype": "float32"}, {"name": "317", "dtype": "float32"}, {"name": "318", "dtype": "float32"}, {"name": "319", "dtype": "float32"}, {"name": "320", "dtype": "float32"}, {"name": "321", "dtype": "float32"}, {"name": "322", "dtype": "float32"}, {"name": "323", "dtype": "float32"}, {"name": "324", "dtype": "float32"}, {"name": "325", "dtype": "float32"}, {"name": "326", "dtype": "float32"}, {"name": "327", "dtype": "float32"}, {"name": "328", "dtype": "float32"}, {"name": "329", "dtype": "float32"}, {"name": "330", "dtype": "float32"}, {"name": "331", "dtype": "float32"}, {"name": "332", "dtype": "float32"}, {"name": "333", "dtype": "float32"}, {"name": "334", "dtype": "float32"}, {"name": "335", "dtype": "float32"}, {"name": "336", "dtype": "float32"}, {"name": "337", "dtype": "float32"}, {"name": "338", "dtype": "float32"}, {"name": "339", "dtype": "float32"}, {"name": "340", "dtype": "float32"}, {"name": "341", "dtype": "float32"}, {"name": "342", "dtype": "float32"}, {"name": "343", "dtype": "float32"}, {"name": "344", "dtype": "float32"}, {"name": "345", "dtype": "float32"}, {"name": "346", "dtype": "float32"}, {"name": "347", "dtype": "float32"}, {"name": "348", "dtype": "float32"}, {"name": "349", "dtype": "float32"}, {"name": "350", "dtype": "float32"}, {"name": "351", "dtype": "float32"}, {"name": "352", "dtype": "float32"}, {"name": "353", "dtype": "float32"}, {"name": "354", "dtype": "float32"}, {"name": "355", "dtype": "float32"}, {"name": "356", "dtype": "float32"}, {"name": "357", "dtype": "float32"}, {"name": "358", "dtype": "float32"}, {"name": "359", "dtype": "float32"}, {"name": "360", "dtype": "float32"}, {"name": "361", "dtype": "float32"}, {"name": "362", "dtype": "float32"}, {"name": "363", "dtype": "float32"}, {"name": "364", "dtype": "float32"}, {"name": "365", "dtype": "float32"}, {"name": "366", "dtype": "float32"}, {"name": "367", "dtype": "float32"}, {"name": "368", "dtype": "float32"}, {"name": "369", "dtype": "float32"}, {"name": "370", "dtype": "float32"}, {"name": "371", "dtype": "float32"}, {"name": "372", "dtype": "float32"}, {"name": "373", "dtype": "float32"}, {"name": "374", "dtype": "float32"}, {"name": "375", "dtype": "float32"}, {"name": "376", "dtype": "float32"}, {"name": "377", "dtype": "float32"}, {"name": "378", "dtype": "float32"}, {"name": "379", "dtype": "float32"}, {"name": "380", "dtype": "float32"}, {"name": "381", "dtype": "float32"}, {"name": 
"382", "dtype": "float32"}, {"name": "383", "dtype": "float32"}, {"name": "384", "dtype": "float32"}, {"name": "385", "dtype": "float32"}, {"name": "386", "dtype": "float32"}, {"name": "387", "dtype": "float32"}, {"name": "388", "dtype": "float32"}, {"name": "389", "dtype": "float32"}, {"name": "390", "dtype": "float32"}, {"name": "391", "dtype": "float32"}, {"name": "392", "dtype": "float32"}, {"name": "393", "dtype": "float32"}, {"name": "394", "dtype": "float32"}, {"name": "395", "dtype": "float32"}, {"name": "396", "dtype": "float32"}, {"name": "397", "dtype": "float32"}, {"name": "398", "dtype": "float32"}, {"name": "399", "dtype": "float32"}, {"name": "400", "dtype": "float32"}, {"name": "401", "dtype": "float32"}, {"name": "402", "dtype": "float32"}, {"name": "403", "dtype": "float32"}, {"name": "404", "dtype": "float32"}, {"name": "405", "dtype": "float32"}, {"name": "406", "dtype": "float32"}, {"name": "407", "dtype": "float32"}, {"name": "408", "dtype": "float32"}, {"name": "409", "dtype": "float32"}, {"name": "410", "dtype": "float32"}, {"name": "411", "dtype": "float32"}, {"name": "412", "dtype": "float32"}, {"name": "413", "dtype": "float32"}, {"name": "414", "dtype": "float32"}, {"name": "415", "dtype": "float32"}, {"name": "416", "dtype": "float32"}, {"name": "417", "dtype": "float32"}, {"name": "418", "dtype": "float32"}, {"name": "419", "dtype": "float32"}, {"name": "420", "dtype": "float32"}, {"name": "421", "dtype": "float32"}, {"name": "422", "dtype": "float32"}, {"name": "423", "dtype": "float32"}, {"name": "424", "dtype": "float32"}, {"name": "425", "dtype": "float32"}, {"name": "426", "dtype": "float32"}, {"name": "427", "dtype": "float32"}, {"name": "428", "dtype": "float32"}, {"name": "429", "dtype": "float32"}, {"name": "430", "dtype": "float32"}, {"name": "431", "dtype": "float32"}, {"name": "432", "dtype": "float32"}, {"name": "433", "dtype": "float32"}, {"name": "434", "dtype": "float32"}, {"name": "435", "dtype": "float32"}, {"name": "436", "dtype": "float32"}, {"name": "437", "dtype": "float32"}, {"name": "438", "dtype": "float32"}, {"name": "439", "dtype": "float32"}, {"name": "440", "dtype": "float32"}, {"name": "441", "dtype": "float32"}, {"name": "442", "dtype": "float32"}, {"name": "443", "dtype": "float32"}, {"name": "444", "dtype": "float32"}, {"name": "445", "dtype": "float32"}, {"name": "446", "dtype": "float32"}, {"name": "447", "dtype": "float32"}, {"name": "448", "dtype": "float32"}, {"name": "449", "dtype": "float32"}, {"name": "450", "dtype": "float32"}, {"name": "451", "dtype": "float32"}, {"name": "452", "dtype": "float32"}, {"name": "453", "dtype": "float32"}, {"name": "454", "dtype": "float32"}, {"name": "455", "dtype": "float32"}, {"name": "456", "dtype": "float32"}, {"name": "457", "dtype": "float32"}, {"name": "458", "dtype": "float32"}, {"name": "459", "dtype": "float32"}, {"name": "460", "dtype": "float32"}, {"name": "461", "dtype": "float32"}, {"name": "462", "dtype": "float32"}, {"name": "463", "dtype": "float32"}, {"name": "464", "dtype": "float32"}, {"name": "465", "dtype": "float32"}, {"name": "466", "dtype": "float32"}, {"name": "467", "dtype": "float32"}, {"name": "468", "dtype": "float32"}, {"name": "469", "dtype": "float32"}, {"name": "470", "dtype": "float32"}, {"name": "471", "dtype": "float32"}, {"name": "472", "dtype": "float32"}, {"name": "473", "dtype": "float32"}, {"name": "474", "dtype": "float32"}, {"name": "475", "dtype": "float32"}, {"name": "476", "dtype": "float32"}, {"name": "477", "dtype": "float32"}, {"name": 
"478", "dtype": "float32"}, {"name": "479", "dtype": "float32"}, {"name": "480", "dtype": "float32"}, {"name": "481", "dtype": "float32"}, {"name": "482", "dtype": "float32"}, {"name": "483", "dtype": "float32"}, {"name": "484", "dtype": "float32"}, {"name": "485", "dtype": "float32"}, {"name": "486", "dtype": "float32"}, {"name": "487", "dtype": "float32"}, {"name": "488", "dtype": "float32"}, {"name": "489", "dtype": "float32"}, {"name": "490", "dtype": "float32"}, {"name": "491", "dtype": "float32"}, {"name": "492", "dtype": "float32"}, {"name": "493", "dtype": "float32"}, {"name": "494", "dtype": "float32"}, {"name": "495", "dtype": "float32"}, {"name": "496", "dtype": "float32"}, {"name": "497", "dtype": "float32"}, {"name": "498", "dtype": "float32"}, {"name": "499", "dtype": "float32"}, {"name": "500", "dtype": "float32"}, {"name": "501", "dtype": "float32"}, {"name": "502", "dtype": "float32"}, {"name": "503", "dtype": "float32"}, {"name": "504", "dtype": "float32"}, {"name": "505", "dtype": "float32"}, {"name": "506", "dtype": "float32"}, {"name": "507", "dtype": "float32"}, {"name": "508", "dtype": "float32"}, {"name": "509", "dtype": "float32"}, {"name": "510", "dtype": "float32"}, {"name": "511", "dtype": "float32"}, {"name": "512", "dtype": "float32"}, {"name": "513", "dtype": "float32"}, {"name": "514", "dtype": "float32"}, {"name": "515", "dtype": "float32"}, {"name": "516", "dtype": "float32"}, {"name": "517", "dtype": "float32"}, {"name": "518", "dtype": "float32"}, {"name": "519", "dtype": "float32"}, {"name": "520", "dtype": "float32"}, {"name": "521", "dtype": "float32"}, {"name": "522", "dtype": "float32"}, {"name": "523", "dtype": "float32"}, {"name": "524", "dtype": "float32"}, {"name": "525", "dtype": "float32"}, {"name": "526", "dtype": "float32"}, {"name": "527", "dtype": "float32"}, {"name": "528", "dtype": "float32"}, {"name": "529", "dtype": "float32"}, {"name": "530", "dtype": "float32"}, {"name": "531", "dtype": "float32"}, {"name": "532", "dtype": "float32"}, {"name": "533", "dtype": "float32"}, {"name": "534", "dtype": "float32"}, {"name": "535", "dtype": "float32"}, {"name": "536", "dtype": "float32"}, {"name": "537", "dtype": "float32"}, {"name": "538", "dtype": "float32"}, {"name": "539", "dtype": "float32"}, {"name": "540", "dtype": "float32"}, {"name": "541", "dtype": "float32"}, {"name": "542", "dtype": "float32"}, {"name": "543", "dtype": "float32"}, {"name": "544", "dtype": "float32"}, {"name": "545", "dtype": "float32"}, {"name": "546", "dtype": "float32"}, {"name": "547", "dtype": "float32"}, {"name": "548", "dtype": "float32"}, {"name": "549", "dtype": "float32"}, {"name": "550", "dtype": "float32"}, {"name": "551", "dtype": "float32"}, {"name": "552", "dtype": "float32"}, {"name": "553", "dtype": "float32"}, {"name": "554", "dtype": "float32"}, {"name": "555", "dtype": "float32"}, {"name": "556", "dtype": "float32"}, {"name": "557", "dtype": "float32"}, {"name": "558", "dtype": "float32"}, {"name": "559", "dtype": "float32"}, {"name": "560", "dtype": "float32"}, {"name": "561", "dtype": "float32"}, {"name": "562", "dtype": "float32"}, {"name": "563", "dtype": "float32"}, {"name": "564", "dtype": "float32"}, {"name": "565", "dtype": "float32"}, {"name": "566", "dtype": "float32"}, {"name": "567", "dtype": "float32"}, {"name": "568", "dtype": "float32"}, {"name": "569", "dtype": "float32"}, {"name": "570", "dtype": "float32"}, {"name": "571", "dtype": "float32"}, {"name": "572", "dtype": "float32"}, {"name": "573", "dtype": "float32"}, {"name": 
"574", "dtype": "float32"}, {"name": "575", "dtype": "float32"}, {"name": "576", "dtype": "float32"}, {"name": "577", "dtype": "float32"}, {"name": "578", "dtype": "float32"}, {"name": "579", "dtype": "float32"}, {"name": "580", "dtype": "float32"}, {"name": "581", "dtype": "float32"}, {"name": "582", "dtype": "float32"}, {"name": "583", "dtype": "float32"}, {"name": "584", "dtype": "float32"}, {"name": "585", "dtype": "float32"}, {"name": "586", "dtype": "float32"}, {"name": "587", "dtype": "float32"}, {"name": "588", "dtype": "float32"}, {"name": "589", "dtype": "float32"}, {"name": "590", "dtype": "float32"}, {"name": "591", "dtype": "float32"}, {"name": "592", "dtype": "float32"}, {"name": "593", "dtype": "float32"}, {"name": "594", "dtype": "float32"}, {"name": "595", "dtype": "float32"}, {"name": "596", "dtype": "float32"}, {"name": "597", "dtype": "float32"}, {"name": "598", "dtype": "float32"}, {"name": "599", "dtype": "float32"}, {"name": "600", "dtype": "float32"}, {"name": "601", "dtype": "float32"}, {"name": "602", "dtype": "float32"}, {"name": "603", "dtype": "float32"}, {"name": "604", "dtype": "float32"}, {"name": "605", "dtype": "float32"}, {"name": "606", "dtype": "float32"}, {"name": "607", "dtype": "float32"}, {"name": "608", "dtype": "float32"}, {"name": "609", "dtype": "float32"}, {"name": "610", "dtype": "float32"}, {"name": "611", "dtype": "float32"}, {"name": "612", "dtype": "float32"}, {"name": "613", "dtype": "float32"}, {"name": "614", "dtype": "float32"}, {"name": "615", "dtype": "float32"}, {"name": "616", "dtype": "float32"}, {"name": "617", "dtype": "float32"}, {"name": "618", "dtype": "float32"}, {"name": "619", "dtype": "float32"}, {"name": "620", "dtype": "float32"}, {"name": "621", "dtype": "float32"}, {"name": "622", "dtype": "float32"}, {"name": "623", "dtype": "float32"}, {"name": "624", "dtype": "float32"}, {"name": "625", "dtype": "float32"}, {"name": "626", "dtype": "float32"}, {"name": "627", "dtype": "float32"}, {"name": "628", "dtype": "float32"}, {"name": "629", "dtype": "float32"}, {"name": "630", "dtype": "float32"}, {"name": "631", "dtype": "float32"}, {"name": "632", "dtype": "float32"}, {"name": "633", "dtype": "float32"}, {"name": "634", "dtype": "float32"}, {"name": "635", "dtype": "float32"}, {"name": "636", "dtype": "float32"}, {"name": "637", "dtype": "float32"}, {"name": "638", "dtype": "float32"}, {"name": "639", "dtype": "float32"}, {"name": "640", "dtype": "float32"}, {"name": "641", "dtype": "float32"}, {"name": "642", "dtype": "float32"}, {"name": "643", "dtype": "float32"}, {"name": "644", "dtype": "float32"}, {"name": "645", "dtype": "float32"}, {"name": "646", "dtype": "float32"}, {"name": "647", "dtype": "float32"}, {"name": "648", "dtype": "float32"}, {"name": "649", "dtype": "float32"}, {"name": "650", "dtype": "float32"}, {"name": "651", "dtype": "float32"}, {"name": "652", "dtype": "float32"}, {"name": "653", "dtype": "float32"}, {"name": "654", "dtype": "float32"}, {"name": "655", "dtype": "float32"}, {"name": "656", "dtype": "float32"}, {"name": "657", "dtype": "float32"}, {"name": "658", "dtype": "float32"}, {"name": "659", "dtype": "float32"}, {"name": "660", "dtype": "float32"}, {"name": "661", "dtype": "float32"}, {"name": "662", "dtype": "float32"}, {"name": "663", "dtype": "float32"}, {"name": "664", "dtype": "float32"}, {"name": "665", "dtype": "float32"}, {"name": "666", "dtype": "float32"}, {"name": "667", "dtype": "float32"}, {"name": "668", "dtype": "float32"}, {"name": "669", "dtype": "float32"}, {"name": 
"670", "dtype": "float32"}, {"name": "671", "dtype": "float32"}, {"name": "672", "dtype": "float32"}, {"name": "673", "dtype": "float32"}, {"name": "674", "dtype": "float32"}, {"name": "675", "dtype": "float32"}, {"name": "676", "dtype": "float32"}, {"name": "677", "dtype": "float32"}, {"name": "678", "dtype": "float32"}, {"name": "679", "dtype": "float32"}, {"name": "680", "dtype": "float32"}, {"name": "681", "dtype": "float32"}, {"name": "682", "dtype": "float32"}, {"name": "683", "dtype": "float32"}, {"name": "684", "dtype": "float32"}, {"name": "685", "dtype": "float32"}, {"name": "686", "dtype": "float32"}, {"name": "687", "dtype": "float32"}, {"name": "688", "dtype": "float32"}, {"name": "689", "dtype": "float32"}, {"name": "690", "dtype": "float32"}, {"name": "691", "dtype": "float32"}, {"name": "692", "dtype": "float32"}, {"name": "693", "dtype": "float32"}, {"name": "694", "dtype": "float32"}, {"name": "695", "dtype": "float32"}, {"name": "696", "dtype": "float32"}, {"name": "697", "dtype": "float32"}, {"name": "698", "dtype": "float32"}, {"name": "699", "dtype": "float32"}, {"name": "700", "dtype": "float32"}, {"name": "701", "dtype": "float32"}, {"name": "702", "dtype": "float32"}, {"name": "703", "dtype": "float32"}, {"name": "704", "dtype": "float32"}, {"name": "705", "dtype": "float32"}, {"name": "706", "dtype": "float32"}, {"name": "707", "dtype": "float32"}, {"name": "708", "dtype": "float32"}, {"name": "709", "dtype": "float32"}, {"name": "710", "dtype": "float32"}, {"name": "711", "dtype": "float32"}, {"name": "712", "dtype": "float32"}, {"name": "713", "dtype": "float32"}, {"name": "714", "dtype": "float32"}, {"name": "715", "dtype": "float32"}, {"name": "716", "dtype": "float32"}, {"name": "717", "dtype": "float32"}, {"name": "718", "dtype": "float32"}, {"name": "719", "dtype": "float32"}, {"name": "720", "dtype": "float32"}, {"name": "721", "dtype": "float32"}, {"name": "722", "dtype": "float32"}, {"name": "723", "dtype": "float32"}, {"name": "724", "dtype": "float32"}, {"name": "725", "dtype": "float32"}, {"name": "726", "dtype": "float32"}, {"name": "727", "dtype": "float32"}, {"name": "728", "dtype": "float32"}, {"name": "729", "dtype": "float32"}, {"name": "730", "dtype": "float32"}, {"name": "731", "dtype": "float32"}, {"name": "732", "dtype": "float32"}, {"name": "733", "dtype": "float32"}, {"name": "734", "dtype": "float32"}, {"name": "735", "dtype": "float32"}, {"name": "736", "dtype": "float32"}, {"name": "737", "dtype": "float32"}, {"name": "738", "dtype": "float32"}, {"name": "739", "dtype": "float32"}, {"name": "740", "dtype": "float32"}, {"name": "741", "dtype": "float32"}, {"name": "742", "dtype": "float32"}, {"name": "743", "dtype": "float32"}, {"name": "744", "dtype": "float32"}, {"name": "745", "dtype": "float32"}, {"name": "746", "dtype": "float32"}, {"name": "747", "dtype": "float32"}, {"name": "748", "dtype": "float32"}, {"name": "749", "dtype": "float32"}, {"name": "750", "dtype": "float32"}, {"name": "751", "dtype": "float32"}, {"name": "752", "dtype": "float32"}, {"name": "753", "dtype": "float32"}, {"name": "754", "dtype": "float32"}, {"name": "755", "dtype": "float32"}, {"name": "756", "dtype": "float32"}, {"name": "757", "dtype": "float32"}, {"name": "758", "dtype": "float32"}, {"name": "759", "dtype": "float32"}, {"name": "760", "dtype": "float32"}, {"name": "761", "dtype": "float32"}, {"name": "762", "dtype": "float32"}, {"name": "763", "dtype": "float32"}, {"name": "764", "dtype": "float32"}, {"name": "765", "dtype": "float32"}, {"name": 
"766", "dtype": "float32"}, {"name": "767", "dtype": "float32"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 115650065.625, "num_examples": 37500}, {"name": "test", "num_bytes": 38550020.0, "num_examples": 12500}], "download_size": 211788382, "dataset_size": 154200085.625}}
2023-08-23T04:16:58+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Spirit_RoBERTa_Finetuned" More Information needed
[ "# Dataset Card for \"Spirit_RoBERTa_Finetuned\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Spirit_RoBERTa_Finetuned\"\n\nMore Information needed" ]
[ 6, 22 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"Spirit_RoBERTa_Finetuned\"\n\nMore Information needed" ]
da69c2b012bba0bcc4ab14869af27e535ab5c2f6
# Dataset Card for "Spirit_DistilRoBERTa_Finetuned" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
EgilKarlsen/Spirit_DistilRoBERTa_Finetuned
[ "region:us" ]
2023-08-18T17:19:47+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "0", "dtype": "float32"}, {"name": "1", "dtype": "float32"}, {"name": "2", "dtype": "float32"}, {"name": "3", "dtype": "float32"}, {"name": "4", "dtype": "float32"}, {"name": "5", "dtype": "float32"}, {"name": "6", "dtype": "float32"}, {"name": "7", "dtype": "float32"}, {"name": "8", "dtype": "float32"}, {"name": "9", "dtype": "float32"}, {"name": "10", "dtype": "float32"}, {"name": "11", "dtype": "float32"}, {"name": "12", "dtype": "float32"}, {"name": "13", "dtype": "float32"}, {"name": "14", "dtype": "float32"}, {"name": "15", "dtype": "float32"}, {"name": "16", "dtype": "float32"}, {"name": "17", "dtype": "float32"}, {"name": "18", "dtype": "float32"}, {"name": "19", "dtype": "float32"}, {"name": "20", "dtype": "float32"}, {"name": "21", "dtype": "float32"}, {"name": "22", "dtype": "float32"}, {"name": "23", "dtype": "float32"}, {"name": "24", "dtype": "float32"}, {"name": "25", "dtype": "float32"}, {"name": "26", "dtype": "float32"}, {"name": "27", "dtype": "float32"}, {"name": "28", "dtype": "float32"}, {"name": "29", "dtype": "float32"}, {"name": "30", "dtype": "float32"}, {"name": "31", "dtype": "float32"}, {"name": "32", "dtype": "float32"}, {"name": "33", "dtype": "float32"}, {"name": "34", "dtype": "float32"}, {"name": "35", "dtype": "float32"}, {"name": "36", "dtype": "float32"}, {"name": "37", "dtype": "float32"}, {"name": "38", "dtype": "float32"}, {"name": "39", "dtype": "float32"}, {"name": "40", "dtype": "float32"}, {"name": "41", "dtype": "float32"}, {"name": "42", "dtype": "float32"}, {"name": "43", "dtype": "float32"}, {"name": "44", "dtype": "float32"}, {"name": "45", "dtype": "float32"}, {"name": "46", "dtype": "float32"}, {"name": "47", "dtype": "float32"}, {"name": "48", "dtype": "float32"}, {"name": "49", "dtype": "float32"}, {"name": "50", "dtype": "float32"}, {"name": "51", "dtype": "float32"}, {"name": "52", "dtype": "float32"}, {"name": "53", "dtype": "float32"}, {"name": "54", "dtype": "float32"}, {"name": "55", "dtype": "float32"}, {"name": "56", "dtype": "float32"}, {"name": "57", "dtype": "float32"}, {"name": "58", "dtype": "float32"}, {"name": "59", "dtype": "float32"}, {"name": "60", "dtype": "float32"}, {"name": "61", "dtype": "float32"}, {"name": "62", "dtype": "float32"}, {"name": "63", "dtype": "float32"}, {"name": "64", "dtype": "float32"}, {"name": "65", "dtype": "float32"}, {"name": "66", "dtype": "float32"}, {"name": "67", "dtype": "float32"}, {"name": "68", "dtype": "float32"}, {"name": "69", "dtype": "float32"}, {"name": "70", "dtype": "float32"}, {"name": "71", "dtype": "float32"}, {"name": "72", "dtype": "float32"}, {"name": "73", "dtype": "float32"}, {"name": "74", "dtype": "float32"}, {"name": "75", "dtype": "float32"}, {"name": "76", "dtype": "float32"}, {"name": "77", "dtype": "float32"}, {"name": "78", "dtype": "float32"}, {"name": "79", "dtype": "float32"}, {"name": "80", "dtype": "float32"}, {"name": "81", "dtype": "float32"}, {"name": "82", "dtype": "float32"}, {"name": "83", "dtype": "float32"}, {"name": "84", "dtype": "float32"}, {"name": "85", "dtype": "float32"}, {"name": "86", "dtype": "float32"}, {"name": "87", "dtype": "float32"}, {"name": "88", "dtype": "float32"}, {"name": "89", "dtype": "float32"}, {"name": "90", "dtype": "float32"}, {"name": "91", "dtype": "float32"}, {"name": "92", "dtype": "float32"}, {"name": "93", "dtype": "float32"}, 
{"name": "94", "dtype": "float32"}, {"name": "95", "dtype": "float32"}, {"name": "96", "dtype": "float32"}, {"name": "97", "dtype": "float32"}, {"name": "98", "dtype": "float32"}, {"name": "99", "dtype": "float32"}, {"name": "100", "dtype": "float32"}, {"name": "101", "dtype": "float32"}, {"name": "102", "dtype": "float32"}, {"name": "103", "dtype": "float32"}, {"name": "104", "dtype": "float32"}, {"name": "105", "dtype": "float32"}, {"name": "106", "dtype": "float32"}, {"name": "107", "dtype": "float32"}, {"name": "108", "dtype": "float32"}, {"name": "109", "dtype": "float32"}, {"name": "110", "dtype": "float32"}, {"name": "111", "dtype": "float32"}, {"name": "112", "dtype": "float32"}, {"name": "113", "dtype": "float32"}, {"name": "114", "dtype": "float32"}, {"name": "115", "dtype": "float32"}, {"name": "116", "dtype": "float32"}, {"name": "117", "dtype": "float32"}, {"name": "118", "dtype": "float32"}, {"name": "119", "dtype": "float32"}, {"name": "120", "dtype": "float32"}, {"name": "121", "dtype": "float32"}, {"name": "122", "dtype": "float32"}, {"name": "123", "dtype": "float32"}, {"name": "124", "dtype": "float32"}, {"name": "125", "dtype": "float32"}, {"name": "126", "dtype": "float32"}, {"name": "127", "dtype": "float32"}, {"name": "128", "dtype": "float32"}, {"name": "129", "dtype": "float32"}, {"name": "130", "dtype": "float32"}, {"name": "131", "dtype": "float32"}, {"name": "132", "dtype": "float32"}, {"name": "133", "dtype": "float32"}, {"name": "134", "dtype": "float32"}, {"name": "135", "dtype": "float32"}, {"name": "136", "dtype": "float32"}, {"name": "137", "dtype": "float32"}, {"name": "138", "dtype": "float32"}, {"name": "139", "dtype": "float32"}, {"name": "140", "dtype": "float32"}, {"name": "141", "dtype": "float32"}, {"name": "142", "dtype": "float32"}, {"name": "143", "dtype": "float32"}, {"name": "144", "dtype": "float32"}, {"name": "145", "dtype": "float32"}, {"name": "146", "dtype": "float32"}, {"name": "147", "dtype": "float32"}, {"name": "148", "dtype": "float32"}, {"name": "149", "dtype": "float32"}, {"name": "150", "dtype": "float32"}, {"name": "151", "dtype": "float32"}, {"name": "152", "dtype": "float32"}, {"name": "153", "dtype": "float32"}, {"name": "154", "dtype": "float32"}, {"name": "155", "dtype": "float32"}, {"name": "156", "dtype": "float32"}, {"name": "157", "dtype": "float32"}, {"name": "158", "dtype": "float32"}, {"name": "159", "dtype": "float32"}, {"name": "160", "dtype": "float32"}, {"name": "161", "dtype": "float32"}, {"name": "162", "dtype": "float32"}, {"name": "163", "dtype": "float32"}, {"name": "164", "dtype": "float32"}, {"name": "165", "dtype": "float32"}, {"name": "166", "dtype": "float32"}, {"name": "167", "dtype": "float32"}, {"name": "168", "dtype": "float32"}, {"name": "169", "dtype": "float32"}, {"name": "170", "dtype": "float32"}, {"name": "171", "dtype": "float32"}, {"name": "172", "dtype": "float32"}, {"name": "173", "dtype": "float32"}, {"name": "174", "dtype": "float32"}, {"name": "175", "dtype": "float32"}, {"name": "176", "dtype": "float32"}, {"name": "177", "dtype": "float32"}, {"name": "178", "dtype": "float32"}, {"name": "179", "dtype": "float32"}, {"name": "180", "dtype": "float32"}, {"name": "181", "dtype": "float32"}, {"name": "182", "dtype": "float32"}, {"name": "183", "dtype": "float32"}, {"name": "184", "dtype": "float32"}, {"name": "185", "dtype": "float32"}, {"name": "186", "dtype": "float32"}, {"name": "187", "dtype": "float32"}, {"name": "188", "dtype": "float32"}, {"name": "189", "dtype": "float32"}, {"name": 
"190", "dtype": "float32"}, {"name": "191", "dtype": "float32"}, {"name": "192", "dtype": "float32"}, {"name": "193", "dtype": "float32"}, {"name": "194", "dtype": "float32"}, {"name": "195", "dtype": "float32"}, {"name": "196", "dtype": "float32"}, {"name": "197", "dtype": "float32"}, {"name": "198", "dtype": "float32"}, {"name": "199", "dtype": "float32"}, {"name": "200", "dtype": "float32"}, {"name": "201", "dtype": "float32"}, {"name": "202", "dtype": "float32"}, {"name": "203", "dtype": "float32"}, {"name": "204", "dtype": "float32"}, {"name": "205", "dtype": "float32"}, {"name": "206", "dtype": "float32"}, {"name": "207", "dtype": "float32"}, {"name": "208", "dtype": "float32"}, {"name": "209", "dtype": "float32"}, {"name": "210", "dtype": "float32"}, {"name": "211", "dtype": "float32"}, {"name": "212", "dtype": "float32"}, {"name": "213", "dtype": "float32"}, {"name": "214", "dtype": "float32"}, {"name": "215", "dtype": "float32"}, {"name": "216", "dtype": "float32"}, {"name": "217", "dtype": "float32"}, {"name": "218", "dtype": "float32"}, {"name": "219", "dtype": "float32"}, {"name": "220", "dtype": "float32"}, {"name": "221", "dtype": "float32"}, {"name": "222", "dtype": "float32"}, {"name": "223", "dtype": "float32"}, {"name": "224", "dtype": "float32"}, {"name": "225", "dtype": "float32"}, {"name": "226", "dtype": "float32"}, {"name": "227", "dtype": "float32"}, {"name": "228", "dtype": "float32"}, {"name": "229", "dtype": "float32"}, {"name": "230", "dtype": "float32"}, {"name": "231", "dtype": "float32"}, {"name": "232", "dtype": "float32"}, {"name": "233", "dtype": "float32"}, {"name": "234", "dtype": "float32"}, {"name": "235", "dtype": "float32"}, {"name": "236", "dtype": "float32"}, {"name": "237", "dtype": "float32"}, {"name": "238", "dtype": "float32"}, {"name": "239", "dtype": "float32"}, {"name": "240", "dtype": "float32"}, {"name": "241", "dtype": "float32"}, {"name": "242", "dtype": "float32"}, {"name": "243", "dtype": "float32"}, {"name": "244", "dtype": "float32"}, {"name": "245", "dtype": "float32"}, {"name": "246", "dtype": "float32"}, {"name": "247", "dtype": "float32"}, {"name": "248", "dtype": "float32"}, {"name": "249", "dtype": "float32"}, {"name": "250", "dtype": "float32"}, {"name": "251", "dtype": "float32"}, {"name": "252", "dtype": "float32"}, {"name": "253", "dtype": "float32"}, {"name": "254", "dtype": "float32"}, {"name": "255", "dtype": "float32"}, {"name": "256", "dtype": "float32"}, {"name": "257", "dtype": "float32"}, {"name": "258", "dtype": "float32"}, {"name": "259", "dtype": "float32"}, {"name": "260", "dtype": "float32"}, {"name": "261", "dtype": "float32"}, {"name": "262", "dtype": "float32"}, {"name": "263", "dtype": "float32"}, {"name": "264", "dtype": "float32"}, {"name": "265", "dtype": "float32"}, {"name": "266", "dtype": "float32"}, {"name": "267", "dtype": "float32"}, {"name": "268", "dtype": "float32"}, {"name": "269", "dtype": "float32"}, {"name": "270", "dtype": "float32"}, {"name": "271", "dtype": "float32"}, {"name": "272", "dtype": "float32"}, {"name": "273", "dtype": "float32"}, {"name": "274", "dtype": "float32"}, {"name": "275", "dtype": "float32"}, {"name": "276", "dtype": "float32"}, {"name": "277", "dtype": "float32"}, {"name": "278", "dtype": "float32"}, {"name": "279", "dtype": "float32"}, {"name": "280", "dtype": "float32"}, {"name": "281", "dtype": "float32"}, {"name": "282", "dtype": "float32"}, {"name": "283", "dtype": "float32"}, {"name": "284", "dtype": "float32"}, {"name": "285", "dtype": "float32"}, {"name": 
"286", "dtype": "float32"}, {"name": "287", "dtype": "float32"}, {"name": "288", "dtype": "float32"}, {"name": "289", "dtype": "float32"}, {"name": "290", "dtype": "float32"}, {"name": "291", "dtype": "float32"}, {"name": "292", "dtype": "float32"}, {"name": "293", "dtype": "float32"}, {"name": "294", "dtype": "float32"}, {"name": "295", "dtype": "float32"}, {"name": "296", "dtype": "float32"}, {"name": "297", "dtype": "float32"}, {"name": "298", "dtype": "float32"}, {"name": "299", "dtype": "float32"}, {"name": "300", "dtype": "float32"}, {"name": "301", "dtype": "float32"}, {"name": "302", "dtype": "float32"}, {"name": "303", "dtype": "float32"}, {"name": "304", "dtype": "float32"}, {"name": "305", "dtype": "float32"}, {"name": "306", "dtype": "float32"}, {"name": "307", "dtype": "float32"}, {"name": "308", "dtype": "float32"}, {"name": "309", "dtype": "float32"}, {"name": "310", "dtype": "float32"}, {"name": "311", "dtype": "float32"}, {"name": "312", "dtype": "float32"}, {"name": "313", "dtype": "float32"}, {"name": "314", "dtype": "float32"}, {"name": "315", "dtype": "float32"}, {"name": "316", "dtype": "float32"}, {"name": "317", "dtype": "float32"}, {"name": "318", "dtype": "float32"}, {"name": "319", "dtype": "float32"}, {"name": "320", "dtype": "float32"}, {"name": "321", "dtype": "float32"}, {"name": "322", "dtype": "float32"}, {"name": "323", "dtype": "float32"}, {"name": "324", "dtype": "float32"}, {"name": "325", "dtype": "float32"}, {"name": "326", "dtype": "float32"}, {"name": "327", "dtype": "float32"}, {"name": "328", "dtype": "float32"}, {"name": "329", "dtype": "float32"}, {"name": "330", "dtype": "float32"}, {"name": "331", "dtype": "float32"}, {"name": "332", "dtype": "float32"}, {"name": "333", "dtype": "float32"}, {"name": "334", "dtype": "float32"}, {"name": "335", "dtype": "float32"}, {"name": "336", "dtype": "float32"}, {"name": "337", "dtype": "float32"}, {"name": "338", "dtype": "float32"}, {"name": "339", "dtype": "float32"}, {"name": "340", "dtype": "float32"}, {"name": "341", "dtype": "float32"}, {"name": "342", "dtype": "float32"}, {"name": "343", "dtype": "float32"}, {"name": "344", "dtype": "float32"}, {"name": "345", "dtype": "float32"}, {"name": "346", "dtype": "float32"}, {"name": "347", "dtype": "float32"}, {"name": "348", "dtype": "float32"}, {"name": "349", "dtype": "float32"}, {"name": "350", "dtype": "float32"}, {"name": "351", "dtype": "float32"}, {"name": "352", "dtype": "float32"}, {"name": "353", "dtype": "float32"}, {"name": "354", "dtype": "float32"}, {"name": "355", "dtype": "float32"}, {"name": "356", "dtype": "float32"}, {"name": "357", "dtype": "float32"}, {"name": "358", "dtype": "float32"}, {"name": "359", "dtype": "float32"}, {"name": "360", "dtype": "float32"}, {"name": "361", "dtype": "float32"}, {"name": "362", "dtype": "float32"}, {"name": "363", "dtype": "float32"}, {"name": "364", "dtype": "float32"}, {"name": "365", "dtype": "float32"}, {"name": "366", "dtype": "float32"}, {"name": "367", "dtype": "float32"}, {"name": "368", "dtype": "float32"}, {"name": "369", "dtype": "float32"}, {"name": "370", "dtype": "float32"}, {"name": "371", "dtype": "float32"}, {"name": "372", "dtype": "float32"}, {"name": "373", "dtype": "float32"}, {"name": "374", "dtype": "float32"}, {"name": "375", "dtype": "float32"}, {"name": "376", "dtype": "float32"}, {"name": "377", "dtype": "float32"}, {"name": "378", "dtype": "float32"}, {"name": "379", "dtype": "float32"}, {"name": "380", "dtype": "float32"}, {"name": "381", "dtype": "float32"}, {"name": 
"382", "dtype": "float32"}, {"name": "383", "dtype": "float32"}, {"name": "384", "dtype": "float32"}, {"name": "385", "dtype": "float32"}, {"name": "386", "dtype": "float32"}, {"name": "387", "dtype": "float32"}, {"name": "388", "dtype": "float32"}, {"name": "389", "dtype": "float32"}, {"name": "390", "dtype": "float32"}, {"name": "391", "dtype": "float32"}, {"name": "392", "dtype": "float32"}, {"name": "393", "dtype": "float32"}, {"name": "394", "dtype": "float32"}, {"name": "395", "dtype": "float32"}, {"name": "396", "dtype": "float32"}, {"name": "397", "dtype": "float32"}, {"name": "398", "dtype": "float32"}, {"name": "399", "dtype": "float32"}, {"name": "400", "dtype": "float32"}, {"name": "401", "dtype": "float32"}, {"name": "402", "dtype": "float32"}, {"name": "403", "dtype": "float32"}, {"name": "404", "dtype": "float32"}, {"name": "405", "dtype": "float32"}, {"name": "406", "dtype": "float32"}, {"name": "407", "dtype": "float32"}, {"name": "408", "dtype": "float32"}, {"name": "409", "dtype": "float32"}, {"name": "410", "dtype": "float32"}, {"name": "411", "dtype": "float32"}, {"name": "412", "dtype": "float32"}, {"name": "413", "dtype": "float32"}, {"name": "414", "dtype": "float32"}, {"name": "415", "dtype": "float32"}, {"name": "416", "dtype": "float32"}, {"name": "417", "dtype": "float32"}, {"name": "418", "dtype": "float32"}, {"name": "419", "dtype": "float32"}, {"name": "420", "dtype": "float32"}, {"name": "421", "dtype": "float32"}, {"name": "422", "dtype": "float32"}, {"name": "423", "dtype": "float32"}, {"name": "424", "dtype": "float32"}, {"name": "425", "dtype": "float32"}, {"name": "426", "dtype": "float32"}, {"name": "427", "dtype": "float32"}, {"name": "428", "dtype": "float32"}, {"name": "429", "dtype": "float32"}, {"name": "430", "dtype": "float32"}, {"name": "431", "dtype": "float32"}, {"name": "432", "dtype": "float32"}, {"name": "433", "dtype": "float32"}, {"name": "434", "dtype": "float32"}, {"name": "435", "dtype": "float32"}, {"name": "436", "dtype": "float32"}, {"name": "437", "dtype": "float32"}, {"name": "438", "dtype": "float32"}, {"name": "439", "dtype": "float32"}, {"name": "440", "dtype": "float32"}, {"name": "441", "dtype": "float32"}, {"name": "442", "dtype": "float32"}, {"name": "443", "dtype": "float32"}, {"name": "444", "dtype": "float32"}, {"name": "445", "dtype": "float32"}, {"name": "446", "dtype": "float32"}, {"name": "447", "dtype": "float32"}, {"name": "448", "dtype": "float32"}, {"name": "449", "dtype": "float32"}, {"name": "450", "dtype": "float32"}, {"name": "451", "dtype": "float32"}, {"name": "452", "dtype": "float32"}, {"name": "453", "dtype": "float32"}, {"name": "454", "dtype": "float32"}, {"name": "455", "dtype": "float32"}, {"name": "456", "dtype": "float32"}, {"name": "457", "dtype": "float32"}, {"name": "458", "dtype": "float32"}, {"name": "459", "dtype": "float32"}, {"name": "460", "dtype": "float32"}, {"name": "461", "dtype": "float32"}, {"name": "462", "dtype": "float32"}, {"name": "463", "dtype": "float32"}, {"name": "464", "dtype": "float32"}, {"name": "465", "dtype": "float32"}, {"name": "466", "dtype": "float32"}, {"name": "467", "dtype": "float32"}, {"name": "468", "dtype": "float32"}, {"name": "469", "dtype": "float32"}, {"name": "470", "dtype": "float32"}, {"name": "471", "dtype": "float32"}, {"name": "472", "dtype": "float32"}, {"name": "473", "dtype": "float32"}, {"name": "474", "dtype": "float32"}, {"name": "475", "dtype": "float32"}, {"name": "476", "dtype": "float32"}, {"name": "477", "dtype": "float32"}, {"name": 
"478", "dtype": "float32"}, {"name": "479", "dtype": "float32"}, {"name": "480", "dtype": "float32"}, {"name": "481", "dtype": "float32"}, {"name": "482", "dtype": "float32"}, {"name": "483", "dtype": "float32"}, {"name": "484", "dtype": "float32"}, {"name": "485", "dtype": "float32"}, {"name": "486", "dtype": "float32"}, {"name": "487", "dtype": "float32"}, {"name": "488", "dtype": "float32"}, {"name": "489", "dtype": "float32"}, {"name": "490", "dtype": "float32"}, {"name": "491", "dtype": "float32"}, {"name": "492", "dtype": "float32"}, {"name": "493", "dtype": "float32"}, {"name": "494", "dtype": "float32"}, {"name": "495", "dtype": "float32"}, {"name": "496", "dtype": "float32"}, {"name": "497", "dtype": "float32"}, {"name": "498", "dtype": "float32"}, {"name": "499", "dtype": "float32"}, {"name": "500", "dtype": "float32"}, {"name": "501", "dtype": "float32"}, {"name": "502", "dtype": "float32"}, {"name": "503", "dtype": "float32"}, {"name": "504", "dtype": "float32"}, {"name": "505", "dtype": "float32"}, {"name": "506", "dtype": "float32"}, {"name": "507", "dtype": "float32"}, {"name": "508", "dtype": "float32"}, {"name": "509", "dtype": "float32"}, {"name": "510", "dtype": "float32"}, {"name": "511", "dtype": "float32"}, {"name": "512", "dtype": "float32"}, {"name": "513", "dtype": "float32"}, {"name": "514", "dtype": "float32"}, {"name": "515", "dtype": "float32"}, {"name": "516", "dtype": "float32"}, {"name": "517", "dtype": "float32"}, {"name": "518", "dtype": "float32"}, {"name": "519", "dtype": "float32"}, {"name": "520", "dtype": "float32"}, {"name": "521", "dtype": "float32"}, {"name": "522", "dtype": "float32"}, {"name": "523", "dtype": "float32"}, {"name": "524", "dtype": "float32"}, {"name": "525", "dtype": "float32"}, {"name": "526", "dtype": "float32"}, {"name": "527", "dtype": "float32"}, {"name": "528", "dtype": "float32"}, {"name": "529", "dtype": "float32"}, {"name": "530", "dtype": "float32"}, {"name": "531", "dtype": "float32"}, {"name": "532", "dtype": "float32"}, {"name": "533", "dtype": "float32"}, {"name": "534", "dtype": "float32"}, {"name": "535", "dtype": "float32"}, {"name": "536", "dtype": "float32"}, {"name": "537", "dtype": "float32"}, {"name": "538", "dtype": "float32"}, {"name": "539", "dtype": "float32"}, {"name": "540", "dtype": "float32"}, {"name": "541", "dtype": "float32"}, {"name": "542", "dtype": "float32"}, {"name": "543", "dtype": "float32"}, {"name": "544", "dtype": "float32"}, {"name": "545", "dtype": "float32"}, {"name": "546", "dtype": "float32"}, {"name": "547", "dtype": "float32"}, {"name": "548", "dtype": "float32"}, {"name": "549", "dtype": "float32"}, {"name": "550", "dtype": "float32"}, {"name": "551", "dtype": "float32"}, {"name": "552", "dtype": "float32"}, {"name": "553", "dtype": "float32"}, {"name": "554", "dtype": "float32"}, {"name": "555", "dtype": "float32"}, {"name": "556", "dtype": "float32"}, {"name": "557", "dtype": "float32"}, {"name": "558", "dtype": "float32"}, {"name": "559", "dtype": "float32"}, {"name": "560", "dtype": "float32"}, {"name": "561", "dtype": "float32"}, {"name": "562", "dtype": "float32"}, {"name": "563", "dtype": "float32"}, {"name": "564", "dtype": "float32"}, {"name": "565", "dtype": "float32"}, {"name": "566", "dtype": "float32"}, {"name": "567", "dtype": "float32"}, {"name": "568", "dtype": "float32"}, {"name": "569", "dtype": "float32"}, {"name": "570", "dtype": "float32"}, {"name": "571", "dtype": "float32"}, {"name": "572", "dtype": "float32"}, {"name": "573", "dtype": "float32"}, {"name": 
"574", "dtype": "float32"}, {"name": "575", "dtype": "float32"}, {"name": "576", "dtype": "float32"}, {"name": "577", "dtype": "float32"}, {"name": "578", "dtype": "float32"}, {"name": "579", "dtype": "float32"}, {"name": "580", "dtype": "float32"}, {"name": "581", "dtype": "float32"}, {"name": "582", "dtype": "float32"}, {"name": "583", "dtype": "float32"}, {"name": "584", "dtype": "float32"}, {"name": "585", "dtype": "float32"}, {"name": "586", "dtype": "float32"}, {"name": "587", "dtype": "float32"}, {"name": "588", "dtype": "float32"}, {"name": "589", "dtype": "float32"}, {"name": "590", "dtype": "float32"}, {"name": "591", "dtype": "float32"}, {"name": "592", "dtype": "float32"}, {"name": "593", "dtype": "float32"}, {"name": "594", "dtype": "float32"}, {"name": "595", "dtype": "float32"}, {"name": "596", "dtype": "float32"}, {"name": "597", "dtype": "float32"}, {"name": "598", "dtype": "float32"}, {"name": "599", "dtype": "float32"}, {"name": "600", "dtype": "float32"}, {"name": "601", "dtype": "float32"}, {"name": "602", "dtype": "float32"}, {"name": "603", "dtype": "float32"}, {"name": "604", "dtype": "float32"}, {"name": "605", "dtype": "float32"}, {"name": "606", "dtype": "float32"}, {"name": "607", "dtype": "float32"}, {"name": "608", "dtype": "float32"}, {"name": "609", "dtype": "float32"}, {"name": "610", "dtype": "float32"}, {"name": "611", "dtype": "float32"}, {"name": "612", "dtype": "float32"}, {"name": "613", "dtype": "float32"}, {"name": "614", "dtype": "float32"}, {"name": "615", "dtype": "float32"}, {"name": "616", "dtype": "float32"}, {"name": "617", "dtype": "float32"}, {"name": "618", "dtype": "float32"}, {"name": "619", "dtype": "float32"}, {"name": "620", "dtype": "float32"}, {"name": "621", "dtype": "float32"}, {"name": "622", "dtype": "float32"}, {"name": "623", "dtype": "float32"}, {"name": "624", "dtype": "float32"}, {"name": "625", "dtype": "float32"}, {"name": "626", "dtype": "float32"}, {"name": "627", "dtype": "float32"}, {"name": "628", "dtype": "float32"}, {"name": "629", "dtype": "float32"}, {"name": "630", "dtype": "float32"}, {"name": "631", "dtype": "float32"}, {"name": "632", "dtype": "float32"}, {"name": "633", "dtype": "float32"}, {"name": "634", "dtype": "float32"}, {"name": "635", "dtype": "float32"}, {"name": "636", "dtype": "float32"}, {"name": "637", "dtype": "float32"}, {"name": "638", "dtype": "float32"}, {"name": "639", "dtype": "float32"}, {"name": "640", "dtype": "float32"}, {"name": "641", "dtype": "float32"}, {"name": "642", "dtype": "float32"}, {"name": "643", "dtype": "float32"}, {"name": "644", "dtype": "float32"}, {"name": "645", "dtype": "float32"}, {"name": "646", "dtype": "float32"}, {"name": "647", "dtype": "float32"}, {"name": "648", "dtype": "float32"}, {"name": "649", "dtype": "float32"}, {"name": "650", "dtype": "float32"}, {"name": "651", "dtype": "float32"}, {"name": "652", "dtype": "float32"}, {"name": "653", "dtype": "float32"}, {"name": "654", "dtype": "float32"}, {"name": "655", "dtype": "float32"}, {"name": "656", "dtype": "float32"}, {"name": "657", "dtype": "float32"}, {"name": "658", "dtype": "float32"}, {"name": "659", "dtype": "float32"}, {"name": "660", "dtype": "float32"}, {"name": "661", "dtype": "float32"}, {"name": "662", "dtype": "float32"}, {"name": "663", "dtype": "float32"}, {"name": "664", "dtype": "float32"}, {"name": "665", "dtype": "float32"}, {"name": "666", "dtype": "float32"}, {"name": "667", "dtype": "float32"}, {"name": "668", "dtype": "float32"}, {"name": "669", "dtype": "float32"}, {"name": 
"670", "dtype": "float32"}, {"name": "671", "dtype": "float32"}, {"name": "672", "dtype": "float32"}, {"name": "673", "dtype": "float32"}, {"name": "674", "dtype": "float32"}, {"name": "675", "dtype": "float32"}, {"name": "676", "dtype": "float32"}, {"name": "677", "dtype": "float32"}, {"name": "678", "dtype": "float32"}, {"name": "679", "dtype": "float32"}, {"name": "680", "dtype": "float32"}, {"name": "681", "dtype": "float32"}, {"name": "682", "dtype": "float32"}, {"name": "683", "dtype": "float32"}, {"name": "684", "dtype": "float32"}, {"name": "685", "dtype": "float32"}, {"name": "686", "dtype": "float32"}, {"name": "687", "dtype": "float32"}, {"name": "688", "dtype": "float32"}, {"name": "689", "dtype": "float32"}, {"name": "690", "dtype": "float32"}, {"name": "691", "dtype": "float32"}, {"name": "692", "dtype": "float32"}, {"name": "693", "dtype": "float32"}, {"name": "694", "dtype": "float32"}, {"name": "695", "dtype": "float32"}, {"name": "696", "dtype": "float32"}, {"name": "697", "dtype": "float32"}, {"name": "698", "dtype": "float32"}, {"name": "699", "dtype": "float32"}, {"name": "700", "dtype": "float32"}, {"name": "701", "dtype": "float32"}, {"name": "702", "dtype": "float32"}, {"name": "703", "dtype": "float32"}, {"name": "704", "dtype": "float32"}, {"name": "705", "dtype": "float32"}, {"name": "706", "dtype": "float32"}, {"name": "707", "dtype": "float32"}, {"name": "708", "dtype": "float32"}, {"name": "709", "dtype": "float32"}, {"name": "710", "dtype": "float32"}, {"name": "711", "dtype": "float32"}, {"name": "712", "dtype": "float32"}, {"name": "713", "dtype": "float32"}, {"name": "714", "dtype": "float32"}, {"name": "715", "dtype": "float32"}, {"name": "716", "dtype": "float32"}, {"name": "717", "dtype": "float32"}, {"name": "718", "dtype": "float32"}, {"name": "719", "dtype": "float32"}, {"name": "720", "dtype": "float32"}, {"name": "721", "dtype": "float32"}, {"name": "722", "dtype": "float32"}, {"name": "723", "dtype": "float32"}, {"name": "724", "dtype": "float32"}, {"name": "725", "dtype": "float32"}, {"name": "726", "dtype": "float32"}, {"name": "727", "dtype": "float32"}, {"name": "728", "dtype": "float32"}, {"name": "729", "dtype": "float32"}, {"name": "730", "dtype": "float32"}, {"name": "731", "dtype": "float32"}, {"name": "732", "dtype": "float32"}, {"name": "733", "dtype": "float32"}, {"name": "734", "dtype": "float32"}, {"name": "735", "dtype": "float32"}, {"name": "736", "dtype": "float32"}, {"name": "737", "dtype": "float32"}, {"name": "738", "dtype": "float32"}, {"name": "739", "dtype": "float32"}, {"name": "740", "dtype": "float32"}, {"name": "741", "dtype": "float32"}, {"name": "742", "dtype": "float32"}, {"name": "743", "dtype": "float32"}, {"name": "744", "dtype": "float32"}, {"name": "745", "dtype": "float32"}, {"name": "746", "dtype": "float32"}, {"name": "747", "dtype": "float32"}, {"name": "748", "dtype": "float32"}, {"name": "749", "dtype": "float32"}, {"name": "750", "dtype": "float32"}, {"name": "751", "dtype": "float32"}, {"name": "752", "dtype": "float32"}, {"name": "753", "dtype": "float32"}, {"name": "754", "dtype": "float32"}, {"name": "755", "dtype": "float32"}, {"name": "756", "dtype": "float32"}, {"name": "757", "dtype": "float32"}, {"name": "758", "dtype": "float32"}, {"name": "759", "dtype": "float32"}, {"name": "760", "dtype": "float32"}, {"name": "761", "dtype": "float32"}, {"name": "762", "dtype": "float32"}, {"name": "763", "dtype": "float32"}, {"name": "764", "dtype": "float32"}, {"name": "765", "dtype": "float32"}, {"name": 
"766", "dtype": "float32"}, {"name": "767", "dtype": "float32"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 115650065.625, "num_examples": 37500}, {"name": "test", "num_bytes": 38550020.0, "num_examples": 12500}], "download_size": 211787430, "dataset_size": 154200085.625}}
2023-08-23T04:23:29+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Spirit_DistilRoBERTa_Finetuned" More Information needed
[ "# Dataset Card for \"Spirit_DistilRoBERTa_Finetuned\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Spirit_DistilRoBERTa_Finetuned\"\n\nMore Information needed" ]
[ 6, 24 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"Spirit_DistilRoBERTa_Finetuned\"\n\nMore Information needed" ]
4e163691dce181131fefbc6c381edf61d38fae11
[https://github.com/jwj7140/ko-medical-chat](https://github.com/jwj7140/ko-medical-chat) Korean medical conversation dataset created by converting [MedText](https://huggingface.co/datasets/BI55/MedText) and [ChatDoctor](https://github.com/Kent0n-Li/ChatDoctor)
squarelike/ko_medical_chat
[ "language:ko", "medical", "region:us" ]
2023-08-18T17:24:58+00:00
{"language": ["ko"], "tags": ["medical"]}
2023-08-19T05:45:48+00:00
[]
[ "ko" ]
TAGS #language-Korean #medical #region-us
URL Korean medical conversation dataset created by converting MedText and ChatDoctor
[]
[ "TAGS\n#language-Korean #medical #region-us \n" ]
[ 14 ]
[ "passage: TAGS\n#language-Korean #medical #region-us \n" ]
12c45cbd580abb5500fb7df94ce3f102294b9b0e
# Dataset Card for "Spirit_GPT2_Finetuned" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
EgilKarlsen/Spirit_GPT2_Finetuned
[ "region:us" ]
2023-08-18T17:26:30+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "0", "dtype": "float32"}, {"name": "1", "dtype": "float32"}, {"name": "2", "dtype": "float32"}, {"name": "3", "dtype": "float32"}, {"name": "4", "dtype": "float32"}, {"name": "5", "dtype": "float32"}, {"name": "6", "dtype": "float32"}, {"name": "7", "dtype": "float32"}, {"name": "8", "dtype": "float32"}, {"name": "9", "dtype": "float32"}, {"name": "10", "dtype": "float32"}, {"name": "11", "dtype": "float32"}, {"name": "12", "dtype": "float32"}, {"name": "13", "dtype": "float32"}, {"name": "14", "dtype": "float32"}, {"name": "15", "dtype": "float32"}, {"name": "16", "dtype": "float32"}, {"name": "17", "dtype": "float32"}, {"name": "18", "dtype": "float32"}, {"name": "19", "dtype": "float32"}, {"name": "20", "dtype": "float32"}, {"name": "21", "dtype": "float32"}, {"name": "22", "dtype": "float32"}, {"name": "23", "dtype": "float32"}, {"name": "24", "dtype": "float32"}, {"name": "25", "dtype": "float32"}, {"name": "26", "dtype": "float32"}, {"name": "27", "dtype": "float32"}, {"name": "28", "dtype": "float32"}, {"name": "29", "dtype": "float32"}, {"name": "30", "dtype": "float32"}, {"name": "31", "dtype": "float32"}, {"name": "32", "dtype": "float32"}, {"name": "33", "dtype": "float32"}, {"name": "34", "dtype": "float32"}, {"name": "35", "dtype": "float32"}, {"name": "36", "dtype": "float32"}, {"name": "37", "dtype": "float32"}, {"name": "38", "dtype": "float32"}, {"name": "39", "dtype": "float32"}, {"name": "40", "dtype": "float32"}, {"name": "41", "dtype": "float32"}, {"name": "42", "dtype": "float32"}, {"name": "43", "dtype": "float32"}, {"name": "44", "dtype": "float32"}, {"name": "45", "dtype": "float32"}, {"name": "46", "dtype": "float32"}, {"name": "47", "dtype": "float32"}, {"name": "48", "dtype": "float32"}, {"name": "49", "dtype": "float32"}, {"name": "50", "dtype": "float32"}, {"name": "51", "dtype": "float32"}, {"name": "52", "dtype": "float32"}, {"name": "53", "dtype": "float32"}, {"name": "54", "dtype": "float32"}, {"name": "55", "dtype": "float32"}, {"name": "56", "dtype": "float32"}, {"name": "57", "dtype": "float32"}, {"name": "58", "dtype": "float32"}, {"name": "59", "dtype": "float32"}, {"name": "60", "dtype": "float32"}, {"name": "61", "dtype": "float32"}, {"name": "62", "dtype": "float32"}, {"name": "63", "dtype": "float32"}, {"name": "64", "dtype": "float32"}, {"name": "65", "dtype": "float32"}, {"name": "66", "dtype": "float32"}, {"name": "67", "dtype": "float32"}, {"name": "68", "dtype": "float32"}, {"name": "69", "dtype": "float32"}, {"name": "70", "dtype": "float32"}, {"name": "71", "dtype": "float32"}, {"name": "72", "dtype": "float32"}, {"name": "73", "dtype": "float32"}, {"name": "74", "dtype": "float32"}, {"name": "75", "dtype": "float32"}, {"name": "76", "dtype": "float32"}, {"name": "77", "dtype": "float32"}, {"name": "78", "dtype": "float32"}, {"name": "79", "dtype": "float32"}, {"name": "80", "dtype": "float32"}, {"name": "81", "dtype": "float32"}, {"name": "82", "dtype": "float32"}, {"name": "83", "dtype": "float32"}, {"name": "84", "dtype": "float32"}, {"name": "85", "dtype": "float32"}, {"name": "86", "dtype": "float32"}, {"name": "87", "dtype": "float32"}, {"name": "88", "dtype": "float32"}, {"name": "89", "dtype": "float32"}, {"name": "90", "dtype": "float32"}, {"name": "91", "dtype": "float32"}, {"name": "92", "dtype": "float32"}, {"name": "93", "dtype": "float32"}, 
{"name": "94", "dtype": "float32"}, {"name": "95", "dtype": "float32"}, {"name": "96", "dtype": "float32"}, {"name": "97", "dtype": "float32"}, {"name": "98", "dtype": "float32"}, {"name": "99", "dtype": "float32"}, {"name": "100", "dtype": "float32"}, {"name": "101", "dtype": "float32"}, {"name": "102", "dtype": "float32"}, {"name": "103", "dtype": "float32"}, {"name": "104", "dtype": "float32"}, {"name": "105", "dtype": "float32"}, {"name": "106", "dtype": "float32"}, {"name": "107", "dtype": "float32"}, {"name": "108", "dtype": "float32"}, {"name": "109", "dtype": "float32"}, {"name": "110", "dtype": "float32"}, {"name": "111", "dtype": "float32"}, {"name": "112", "dtype": "float32"}, {"name": "113", "dtype": "float32"}, {"name": "114", "dtype": "float32"}, {"name": "115", "dtype": "float32"}, {"name": "116", "dtype": "float32"}, {"name": "117", "dtype": "float32"}, {"name": "118", "dtype": "float32"}, {"name": "119", "dtype": "float32"}, {"name": "120", "dtype": "float32"}, {"name": "121", "dtype": "float32"}, {"name": "122", "dtype": "float32"}, {"name": "123", "dtype": "float32"}, {"name": "124", "dtype": "float32"}, {"name": "125", "dtype": "float32"}, {"name": "126", "dtype": "float32"}, {"name": "127", "dtype": "float32"}, {"name": "128", "dtype": "float32"}, {"name": "129", "dtype": "float32"}, {"name": "130", "dtype": "float32"}, {"name": "131", "dtype": "float32"}, {"name": "132", "dtype": "float32"}, {"name": "133", "dtype": "float32"}, {"name": "134", "dtype": "float32"}, {"name": "135", "dtype": "float32"}, {"name": "136", "dtype": "float32"}, {"name": "137", "dtype": "float32"}, {"name": "138", "dtype": "float32"}, {"name": "139", "dtype": "float32"}, {"name": "140", "dtype": "float32"}, {"name": "141", "dtype": "float32"}, {"name": "142", "dtype": "float32"}, {"name": "143", "dtype": "float32"}, {"name": "144", "dtype": "float32"}, {"name": "145", "dtype": "float32"}, {"name": "146", "dtype": "float32"}, {"name": "147", "dtype": "float32"}, {"name": "148", "dtype": "float32"}, {"name": "149", "dtype": "float32"}, {"name": "150", "dtype": "float32"}, {"name": "151", "dtype": "float32"}, {"name": "152", "dtype": "float32"}, {"name": "153", "dtype": "float32"}, {"name": "154", "dtype": "float32"}, {"name": "155", "dtype": "float32"}, {"name": "156", "dtype": "float32"}, {"name": "157", "dtype": "float32"}, {"name": "158", "dtype": "float32"}, {"name": "159", "dtype": "float32"}, {"name": "160", "dtype": "float32"}, {"name": "161", "dtype": "float32"}, {"name": "162", "dtype": "float32"}, {"name": "163", "dtype": "float32"}, {"name": "164", "dtype": "float32"}, {"name": "165", "dtype": "float32"}, {"name": "166", "dtype": "float32"}, {"name": "167", "dtype": "float32"}, {"name": "168", "dtype": "float32"}, {"name": "169", "dtype": "float32"}, {"name": "170", "dtype": "float32"}, {"name": "171", "dtype": "float32"}, {"name": "172", "dtype": "float32"}, {"name": "173", "dtype": "float32"}, {"name": "174", "dtype": "float32"}, {"name": "175", "dtype": "float32"}, {"name": "176", "dtype": "float32"}, {"name": "177", "dtype": "float32"}, {"name": "178", "dtype": "float32"}, {"name": "179", "dtype": "float32"}, {"name": "180", "dtype": "float32"}, {"name": "181", "dtype": "float32"}, {"name": "182", "dtype": "float32"}, {"name": "183", "dtype": "float32"}, {"name": "184", "dtype": "float32"}, {"name": "185", "dtype": "float32"}, {"name": "186", "dtype": "float32"}, {"name": "187", "dtype": "float32"}, {"name": "188", "dtype": "float32"}, {"name": "189", "dtype": "float32"}, {"name": 
"190", "dtype": "float32"}, {"name": "191", "dtype": "float32"}, {"name": "192", "dtype": "float32"}, {"name": "193", "dtype": "float32"}, {"name": "194", "dtype": "float32"}, {"name": "195", "dtype": "float32"}, {"name": "196", "dtype": "float32"}, {"name": "197", "dtype": "float32"}, {"name": "198", "dtype": "float32"}, {"name": "199", "dtype": "float32"}, {"name": "200", "dtype": "float32"}, {"name": "201", "dtype": "float32"}, {"name": "202", "dtype": "float32"}, {"name": "203", "dtype": "float32"}, {"name": "204", "dtype": "float32"}, {"name": "205", "dtype": "float32"}, {"name": "206", "dtype": "float32"}, {"name": "207", "dtype": "float32"}, {"name": "208", "dtype": "float32"}, {"name": "209", "dtype": "float32"}, {"name": "210", "dtype": "float32"}, {"name": "211", "dtype": "float32"}, {"name": "212", "dtype": "float32"}, {"name": "213", "dtype": "float32"}, {"name": "214", "dtype": "float32"}, {"name": "215", "dtype": "float32"}, {"name": "216", "dtype": "float32"}, {"name": "217", "dtype": "float32"}, {"name": "218", "dtype": "float32"}, {"name": "219", "dtype": "float32"}, {"name": "220", "dtype": "float32"}, {"name": "221", "dtype": "float32"}, {"name": "222", "dtype": "float32"}, {"name": "223", "dtype": "float32"}, {"name": "224", "dtype": "float32"}, {"name": "225", "dtype": "float32"}, {"name": "226", "dtype": "float32"}, {"name": "227", "dtype": "float32"}, {"name": "228", "dtype": "float32"}, {"name": "229", "dtype": "float32"}, {"name": "230", "dtype": "float32"}, {"name": "231", "dtype": "float32"}, {"name": "232", "dtype": "float32"}, {"name": "233", "dtype": "float32"}, {"name": "234", "dtype": "float32"}, {"name": "235", "dtype": "float32"}, {"name": "236", "dtype": "float32"}, {"name": "237", "dtype": "float32"}, {"name": "238", "dtype": "float32"}, {"name": "239", "dtype": "float32"}, {"name": "240", "dtype": "float32"}, {"name": "241", "dtype": "float32"}, {"name": "242", "dtype": "float32"}, {"name": "243", "dtype": "float32"}, {"name": "244", "dtype": "float32"}, {"name": "245", "dtype": "float32"}, {"name": "246", "dtype": "float32"}, {"name": "247", "dtype": "float32"}, {"name": "248", "dtype": "float32"}, {"name": "249", "dtype": "float32"}, {"name": "250", "dtype": "float32"}, {"name": "251", "dtype": "float32"}, {"name": "252", "dtype": "float32"}, {"name": "253", "dtype": "float32"}, {"name": "254", "dtype": "float32"}, {"name": "255", "dtype": "float32"}, {"name": "256", "dtype": "float32"}, {"name": "257", "dtype": "float32"}, {"name": "258", "dtype": "float32"}, {"name": "259", "dtype": "float32"}, {"name": "260", "dtype": "float32"}, {"name": "261", "dtype": "float32"}, {"name": "262", "dtype": "float32"}, {"name": "263", "dtype": "float32"}, {"name": "264", "dtype": "float32"}, {"name": "265", "dtype": "float32"}, {"name": "266", "dtype": "float32"}, {"name": "267", "dtype": "float32"}, {"name": "268", "dtype": "float32"}, {"name": "269", "dtype": "float32"}, {"name": "270", "dtype": "float32"}, {"name": "271", "dtype": "float32"}, {"name": "272", "dtype": "float32"}, {"name": "273", "dtype": "float32"}, {"name": "274", "dtype": "float32"}, {"name": "275", "dtype": "float32"}, {"name": "276", "dtype": "float32"}, {"name": "277", "dtype": "float32"}, {"name": "278", "dtype": "float32"}, {"name": "279", "dtype": "float32"}, {"name": "280", "dtype": "float32"}, {"name": "281", "dtype": "float32"}, {"name": "282", "dtype": "float32"}, {"name": "283", "dtype": "float32"}, {"name": "284", "dtype": "float32"}, {"name": "285", "dtype": "float32"}, {"name": 
"286", "dtype": "float32"}, {"name": "287", "dtype": "float32"}, {"name": "288", "dtype": "float32"}, {"name": "289", "dtype": "float32"}, {"name": "290", "dtype": "float32"}, {"name": "291", "dtype": "float32"}, {"name": "292", "dtype": "float32"}, {"name": "293", "dtype": "float32"}, {"name": "294", "dtype": "float32"}, {"name": "295", "dtype": "float32"}, {"name": "296", "dtype": "float32"}, {"name": "297", "dtype": "float32"}, {"name": "298", "dtype": "float32"}, {"name": "299", "dtype": "float32"}, {"name": "300", "dtype": "float32"}, {"name": "301", "dtype": "float32"}, {"name": "302", "dtype": "float32"}, {"name": "303", "dtype": "float32"}, {"name": "304", "dtype": "float32"}, {"name": "305", "dtype": "float32"}, {"name": "306", "dtype": "float32"}, {"name": "307", "dtype": "float32"}, {"name": "308", "dtype": "float32"}, {"name": "309", "dtype": "float32"}, {"name": "310", "dtype": "float32"}, {"name": "311", "dtype": "float32"}, {"name": "312", "dtype": "float32"}, {"name": "313", "dtype": "float32"}, {"name": "314", "dtype": "float32"}, {"name": "315", "dtype": "float32"}, {"name": "316", "dtype": "float32"}, {"name": "317", "dtype": "float32"}, {"name": "318", "dtype": "float32"}, {"name": "319", "dtype": "float32"}, {"name": "320", "dtype": "float32"}, {"name": "321", "dtype": "float32"}, {"name": "322", "dtype": "float32"}, {"name": "323", "dtype": "float32"}, {"name": "324", "dtype": "float32"}, {"name": "325", "dtype": "float32"}, {"name": "326", "dtype": "float32"}, {"name": "327", "dtype": "float32"}, {"name": "328", "dtype": "float32"}, {"name": "329", "dtype": "float32"}, {"name": "330", "dtype": "float32"}, {"name": "331", "dtype": "float32"}, {"name": "332", "dtype": "float32"}, {"name": "333", "dtype": "float32"}, {"name": "334", "dtype": "float32"}, {"name": "335", "dtype": "float32"}, {"name": "336", "dtype": "float32"}, {"name": "337", "dtype": "float32"}, {"name": "338", "dtype": "float32"}, {"name": "339", "dtype": "float32"}, {"name": "340", "dtype": "float32"}, {"name": "341", "dtype": "float32"}, {"name": "342", "dtype": "float32"}, {"name": "343", "dtype": "float32"}, {"name": "344", "dtype": "float32"}, {"name": "345", "dtype": "float32"}, {"name": "346", "dtype": "float32"}, {"name": "347", "dtype": "float32"}, {"name": "348", "dtype": "float32"}, {"name": "349", "dtype": "float32"}, {"name": "350", "dtype": "float32"}, {"name": "351", "dtype": "float32"}, {"name": "352", "dtype": "float32"}, {"name": "353", "dtype": "float32"}, {"name": "354", "dtype": "float32"}, {"name": "355", "dtype": "float32"}, {"name": "356", "dtype": "float32"}, {"name": "357", "dtype": "float32"}, {"name": "358", "dtype": "float32"}, {"name": "359", "dtype": "float32"}, {"name": "360", "dtype": "float32"}, {"name": "361", "dtype": "float32"}, {"name": "362", "dtype": "float32"}, {"name": "363", "dtype": "float32"}, {"name": "364", "dtype": "float32"}, {"name": "365", "dtype": "float32"}, {"name": "366", "dtype": "float32"}, {"name": "367", "dtype": "float32"}, {"name": "368", "dtype": "float32"}, {"name": "369", "dtype": "float32"}, {"name": "370", "dtype": "float32"}, {"name": "371", "dtype": "float32"}, {"name": "372", "dtype": "float32"}, {"name": "373", "dtype": "float32"}, {"name": "374", "dtype": "float32"}, {"name": "375", "dtype": "float32"}, {"name": "376", "dtype": "float32"}, {"name": "377", "dtype": "float32"}, {"name": "378", "dtype": "float32"}, {"name": "379", "dtype": "float32"}, {"name": "380", "dtype": "float32"}, {"name": "381", "dtype": "float32"}, {"name": 
"382", "dtype": "float32"}, {"name": "383", "dtype": "float32"}, {"name": "384", "dtype": "float32"}, {"name": "385", "dtype": "float32"}, {"name": "386", "dtype": "float32"}, {"name": "387", "dtype": "float32"}, {"name": "388", "dtype": "float32"}, {"name": "389", "dtype": "float32"}, {"name": "390", "dtype": "float32"}, {"name": "391", "dtype": "float32"}, {"name": "392", "dtype": "float32"}, {"name": "393", "dtype": "float32"}, {"name": "394", "dtype": "float32"}, {"name": "395", "dtype": "float32"}, {"name": "396", "dtype": "float32"}, {"name": "397", "dtype": "float32"}, {"name": "398", "dtype": "float32"}, {"name": "399", "dtype": "float32"}, {"name": "400", "dtype": "float32"}, {"name": "401", "dtype": "float32"}, {"name": "402", "dtype": "float32"}, {"name": "403", "dtype": "float32"}, {"name": "404", "dtype": "float32"}, {"name": "405", "dtype": "float32"}, {"name": "406", "dtype": "float32"}, {"name": "407", "dtype": "float32"}, {"name": "408", "dtype": "float32"}, {"name": "409", "dtype": "float32"}, {"name": "410", "dtype": "float32"}, {"name": "411", "dtype": "float32"}, {"name": "412", "dtype": "float32"}, {"name": "413", "dtype": "float32"}, {"name": "414", "dtype": "float32"}, {"name": "415", "dtype": "float32"}, {"name": "416", "dtype": "float32"}, {"name": "417", "dtype": "float32"}, {"name": "418", "dtype": "float32"}, {"name": "419", "dtype": "float32"}, {"name": "420", "dtype": "float32"}, {"name": "421", "dtype": "float32"}, {"name": "422", "dtype": "float32"}, {"name": "423", "dtype": "float32"}, {"name": "424", "dtype": "float32"}, {"name": "425", "dtype": "float32"}, {"name": "426", "dtype": "float32"}, {"name": "427", "dtype": "float32"}, {"name": "428", "dtype": "float32"}, {"name": "429", "dtype": "float32"}, {"name": "430", "dtype": "float32"}, {"name": "431", "dtype": "float32"}, {"name": "432", "dtype": "float32"}, {"name": "433", "dtype": "float32"}, {"name": "434", "dtype": "float32"}, {"name": "435", "dtype": "float32"}, {"name": "436", "dtype": "float32"}, {"name": "437", "dtype": "float32"}, {"name": "438", "dtype": "float32"}, {"name": "439", "dtype": "float32"}, {"name": "440", "dtype": "float32"}, {"name": "441", "dtype": "float32"}, {"name": "442", "dtype": "float32"}, {"name": "443", "dtype": "float32"}, {"name": "444", "dtype": "float32"}, {"name": "445", "dtype": "float32"}, {"name": "446", "dtype": "float32"}, {"name": "447", "dtype": "float32"}, {"name": "448", "dtype": "float32"}, {"name": "449", "dtype": "float32"}, {"name": "450", "dtype": "float32"}, {"name": "451", "dtype": "float32"}, {"name": "452", "dtype": "float32"}, {"name": "453", "dtype": "float32"}, {"name": "454", "dtype": "float32"}, {"name": "455", "dtype": "float32"}, {"name": "456", "dtype": "float32"}, {"name": "457", "dtype": "float32"}, {"name": "458", "dtype": "float32"}, {"name": "459", "dtype": "float32"}, {"name": "460", "dtype": "float32"}, {"name": "461", "dtype": "float32"}, {"name": "462", "dtype": "float32"}, {"name": "463", "dtype": "float32"}, {"name": "464", "dtype": "float32"}, {"name": "465", "dtype": "float32"}, {"name": "466", "dtype": "float32"}, {"name": "467", "dtype": "float32"}, {"name": "468", "dtype": "float32"}, {"name": "469", "dtype": "float32"}, {"name": "470", "dtype": "float32"}, {"name": "471", "dtype": "float32"}, {"name": "472", "dtype": "float32"}, {"name": "473", "dtype": "float32"}, {"name": "474", "dtype": "float32"}, {"name": "475", "dtype": "float32"}, {"name": "476", "dtype": "float32"}, {"name": "477", "dtype": "float32"}, {"name": 
"478", "dtype": "float32"}, {"name": "479", "dtype": "float32"}, {"name": "480", "dtype": "float32"}, {"name": "481", "dtype": "float32"}, {"name": "482", "dtype": "float32"}, {"name": "483", "dtype": "float32"}, {"name": "484", "dtype": "float32"}, {"name": "485", "dtype": "float32"}, {"name": "486", "dtype": "float32"}, {"name": "487", "dtype": "float32"}, {"name": "488", "dtype": "float32"}, {"name": "489", "dtype": "float32"}, {"name": "490", "dtype": "float32"}, {"name": "491", "dtype": "float32"}, {"name": "492", "dtype": "float32"}, {"name": "493", "dtype": "float32"}, {"name": "494", "dtype": "float32"}, {"name": "495", "dtype": "float32"}, {"name": "496", "dtype": "float32"}, {"name": "497", "dtype": "float32"}, {"name": "498", "dtype": "float32"}, {"name": "499", "dtype": "float32"}, {"name": "500", "dtype": "float32"}, {"name": "501", "dtype": "float32"}, {"name": "502", "dtype": "float32"}, {"name": "503", "dtype": "float32"}, {"name": "504", "dtype": "float32"}, {"name": "505", "dtype": "float32"}, {"name": "506", "dtype": "float32"}, {"name": "507", "dtype": "float32"}, {"name": "508", "dtype": "float32"}, {"name": "509", "dtype": "float32"}, {"name": "510", "dtype": "float32"}, {"name": "511", "dtype": "float32"}, {"name": "512", "dtype": "float32"}, {"name": "513", "dtype": "float32"}, {"name": "514", "dtype": "float32"}, {"name": "515", "dtype": "float32"}, {"name": "516", "dtype": "float32"}, {"name": "517", "dtype": "float32"}, {"name": "518", "dtype": "float32"}, {"name": "519", "dtype": "float32"}, {"name": "520", "dtype": "float32"}, {"name": "521", "dtype": "float32"}, {"name": "522", "dtype": "float32"}, {"name": "523", "dtype": "float32"}, {"name": "524", "dtype": "float32"}, {"name": "525", "dtype": "float32"}, {"name": "526", "dtype": "float32"}, {"name": "527", "dtype": "float32"}, {"name": "528", "dtype": "float32"}, {"name": "529", "dtype": "float32"}, {"name": "530", "dtype": "float32"}, {"name": "531", "dtype": "float32"}, {"name": "532", "dtype": "float32"}, {"name": "533", "dtype": "float32"}, {"name": "534", "dtype": "float32"}, {"name": "535", "dtype": "float32"}, {"name": "536", "dtype": "float32"}, {"name": "537", "dtype": "float32"}, {"name": "538", "dtype": "float32"}, {"name": "539", "dtype": "float32"}, {"name": "540", "dtype": "float32"}, {"name": "541", "dtype": "float32"}, {"name": "542", "dtype": "float32"}, {"name": "543", "dtype": "float32"}, {"name": "544", "dtype": "float32"}, {"name": "545", "dtype": "float32"}, {"name": "546", "dtype": "float32"}, {"name": "547", "dtype": "float32"}, {"name": "548", "dtype": "float32"}, {"name": "549", "dtype": "float32"}, {"name": "550", "dtype": "float32"}, {"name": "551", "dtype": "float32"}, {"name": "552", "dtype": "float32"}, {"name": "553", "dtype": "float32"}, {"name": "554", "dtype": "float32"}, {"name": "555", "dtype": "float32"}, {"name": "556", "dtype": "float32"}, {"name": "557", "dtype": "float32"}, {"name": "558", "dtype": "float32"}, {"name": "559", "dtype": "float32"}, {"name": "560", "dtype": "float32"}, {"name": "561", "dtype": "float32"}, {"name": "562", "dtype": "float32"}, {"name": "563", "dtype": "float32"}, {"name": "564", "dtype": "float32"}, {"name": "565", "dtype": "float32"}, {"name": "566", "dtype": "float32"}, {"name": "567", "dtype": "float32"}, {"name": "568", "dtype": "float32"}, {"name": "569", "dtype": "float32"}, {"name": "570", "dtype": "float32"}, {"name": "571", "dtype": "float32"}, {"name": "572", "dtype": "float32"}, {"name": "573", "dtype": "float32"}, {"name": 
"574", "dtype": "float32"}, {"name": "575", "dtype": "float32"}, {"name": "576", "dtype": "float32"}, {"name": "577", "dtype": "float32"}, {"name": "578", "dtype": "float32"}, {"name": "579", "dtype": "float32"}, {"name": "580", "dtype": "float32"}, {"name": "581", "dtype": "float32"}, {"name": "582", "dtype": "float32"}, {"name": "583", "dtype": "float32"}, {"name": "584", "dtype": "float32"}, {"name": "585", "dtype": "float32"}, {"name": "586", "dtype": "float32"}, {"name": "587", "dtype": "float32"}, {"name": "588", "dtype": "float32"}, {"name": "589", "dtype": "float32"}, {"name": "590", "dtype": "float32"}, {"name": "591", "dtype": "float32"}, {"name": "592", "dtype": "float32"}, {"name": "593", "dtype": "float32"}, {"name": "594", "dtype": "float32"}, {"name": "595", "dtype": "float32"}, {"name": "596", "dtype": "float32"}, {"name": "597", "dtype": "float32"}, {"name": "598", "dtype": "float32"}, {"name": "599", "dtype": "float32"}, {"name": "600", "dtype": "float32"}, {"name": "601", "dtype": "float32"}, {"name": "602", "dtype": "float32"}, {"name": "603", "dtype": "float32"}, {"name": "604", "dtype": "float32"}, {"name": "605", "dtype": "float32"}, {"name": "606", "dtype": "float32"}, {"name": "607", "dtype": "float32"}, {"name": "608", "dtype": "float32"}, {"name": "609", "dtype": "float32"}, {"name": "610", "dtype": "float32"}, {"name": "611", "dtype": "float32"}, {"name": "612", "dtype": "float32"}, {"name": "613", "dtype": "float32"}, {"name": "614", "dtype": "float32"}, {"name": "615", "dtype": "float32"}, {"name": "616", "dtype": "float32"}, {"name": "617", "dtype": "float32"}, {"name": "618", "dtype": "float32"}, {"name": "619", "dtype": "float32"}, {"name": "620", "dtype": "float32"}, {"name": "621", "dtype": "float32"}, {"name": "622", "dtype": "float32"}, {"name": "623", "dtype": "float32"}, {"name": "624", "dtype": "float32"}, {"name": "625", "dtype": "float32"}, {"name": "626", "dtype": "float32"}, {"name": "627", "dtype": "float32"}, {"name": "628", "dtype": "float32"}, {"name": "629", "dtype": "float32"}, {"name": "630", "dtype": "float32"}, {"name": "631", "dtype": "float32"}, {"name": "632", "dtype": "float32"}, {"name": "633", "dtype": "float32"}, {"name": "634", "dtype": "float32"}, {"name": "635", "dtype": "float32"}, {"name": "636", "dtype": "float32"}, {"name": "637", "dtype": "float32"}, {"name": "638", "dtype": "float32"}, {"name": "639", "dtype": "float32"}, {"name": "640", "dtype": "float32"}, {"name": "641", "dtype": "float32"}, {"name": "642", "dtype": "float32"}, {"name": "643", "dtype": "float32"}, {"name": "644", "dtype": "float32"}, {"name": "645", "dtype": "float32"}, {"name": "646", "dtype": "float32"}, {"name": "647", "dtype": "float32"}, {"name": "648", "dtype": "float32"}, {"name": "649", "dtype": "float32"}, {"name": "650", "dtype": "float32"}, {"name": "651", "dtype": "float32"}, {"name": "652", "dtype": "float32"}, {"name": "653", "dtype": "float32"}, {"name": "654", "dtype": "float32"}, {"name": "655", "dtype": "float32"}, {"name": "656", "dtype": "float32"}, {"name": "657", "dtype": "float32"}, {"name": "658", "dtype": "float32"}, {"name": "659", "dtype": "float32"}, {"name": "660", "dtype": "float32"}, {"name": "661", "dtype": "float32"}, {"name": "662", "dtype": "float32"}, {"name": "663", "dtype": "float32"}, {"name": "664", "dtype": "float32"}, {"name": "665", "dtype": "float32"}, {"name": "666", "dtype": "float32"}, {"name": "667", "dtype": "float32"}, {"name": "668", "dtype": "float32"}, {"name": "669", "dtype": "float32"}, {"name": 
"670", "dtype": "float32"}, {"name": "671", "dtype": "float32"}, {"name": "672", "dtype": "float32"}, {"name": "673", "dtype": "float32"}, {"name": "674", "dtype": "float32"}, {"name": "675", "dtype": "float32"}, {"name": "676", "dtype": "float32"}, {"name": "677", "dtype": "float32"}, {"name": "678", "dtype": "float32"}, {"name": "679", "dtype": "float32"}, {"name": "680", "dtype": "float32"}, {"name": "681", "dtype": "float32"}, {"name": "682", "dtype": "float32"}, {"name": "683", "dtype": "float32"}, {"name": "684", "dtype": "float32"}, {"name": "685", "dtype": "float32"}, {"name": "686", "dtype": "float32"}, {"name": "687", "dtype": "float32"}, {"name": "688", "dtype": "float32"}, {"name": "689", "dtype": "float32"}, {"name": "690", "dtype": "float32"}, {"name": "691", "dtype": "float32"}, {"name": "692", "dtype": "float32"}, {"name": "693", "dtype": "float32"}, {"name": "694", "dtype": "float32"}, {"name": "695", "dtype": "float32"}, {"name": "696", "dtype": "float32"}, {"name": "697", "dtype": "float32"}, {"name": "698", "dtype": "float32"}, {"name": "699", "dtype": "float32"}, {"name": "700", "dtype": "float32"}, {"name": "701", "dtype": "float32"}, {"name": "702", "dtype": "float32"}, {"name": "703", "dtype": "float32"}, {"name": "704", "dtype": "float32"}, {"name": "705", "dtype": "float32"}, {"name": "706", "dtype": "float32"}, {"name": "707", "dtype": "float32"}, {"name": "708", "dtype": "float32"}, {"name": "709", "dtype": "float32"}, {"name": "710", "dtype": "float32"}, {"name": "711", "dtype": "float32"}, {"name": "712", "dtype": "float32"}, {"name": "713", "dtype": "float32"}, {"name": "714", "dtype": "float32"}, {"name": "715", "dtype": "float32"}, {"name": "716", "dtype": "float32"}, {"name": "717", "dtype": "float32"}, {"name": "718", "dtype": "float32"}, {"name": "719", "dtype": "float32"}, {"name": "720", "dtype": "float32"}, {"name": "721", "dtype": "float32"}, {"name": "722", "dtype": "float32"}, {"name": "723", "dtype": "float32"}, {"name": "724", "dtype": "float32"}, {"name": "725", "dtype": "float32"}, {"name": "726", "dtype": "float32"}, {"name": "727", "dtype": "float32"}, {"name": "728", "dtype": "float32"}, {"name": "729", "dtype": "float32"}, {"name": "730", "dtype": "float32"}, {"name": "731", "dtype": "float32"}, {"name": "732", "dtype": "float32"}, {"name": "733", "dtype": "float32"}, {"name": "734", "dtype": "float32"}, {"name": "735", "dtype": "float32"}, {"name": "736", "dtype": "float32"}, {"name": "737", "dtype": "float32"}, {"name": "738", "dtype": "float32"}, {"name": "739", "dtype": "float32"}, {"name": "740", "dtype": "float32"}, {"name": "741", "dtype": "float32"}, {"name": "742", "dtype": "float32"}, {"name": "743", "dtype": "float32"}, {"name": "744", "dtype": "float32"}, {"name": "745", "dtype": "float32"}, {"name": "746", "dtype": "float32"}, {"name": "747", "dtype": "float32"}, {"name": "748", "dtype": "float32"}, {"name": "749", "dtype": "float32"}, {"name": "750", "dtype": "float32"}, {"name": "751", "dtype": "float32"}, {"name": "752", "dtype": "float32"}, {"name": "753", "dtype": "float32"}, {"name": "754", "dtype": "float32"}, {"name": "755", "dtype": "float32"}, {"name": "756", "dtype": "float32"}, {"name": "757", "dtype": "float32"}, {"name": "758", "dtype": "float32"}, {"name": "759", "dtype": "float32"}, {"name": "760", "dtype": "float32"}, {"name": "761", "dtype": "float32"}, {"name": "762", "dtype": "float32"}, {"name": "763", "dtype": "float32"}, {"name": "764", "dtype": "float32"}, {"name": "765", "dtype": "float32"}, {"name": 
"766", "dtype": "float32"}, {"name": "767", "dtype": "float32"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 115650065.625, "num_examples": 37500}, {"name": "test", "num_bytes": 38550020.0, "num_examples": 12500}], "download_size": 211753822, "dataset_size": 154200085.625}}
2023-08-23T04:30:24+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Spirit_GPT2_Finetuned" More Information needed
[ "# Dataset Card for \"Spirit_GPT2_Finetuned\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Spirit_GPT2_Finetuned\"\n\nMore Information needed" ]
[ 6, 22 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"Spirit_GPT2_Finetuned\"\n\nMore Information needed" ]
5ce454379e2c42b1e455fd43f2d94ed6dbff900a
## Video Augmented Texts Data

### VATEX

Each video contains 10 captions. In `vatex.zip`, there are:

* `test/`: a folder containing all available videos
* `vatex_public_test_english_v1.1.json`: a JSON file containing all captions

Example data loading:

```py
import os
import json

path = 'vatex_public_test_english_v1.1.json'
d = json.load(open(path, 'r'))
captions = {v['videoID']: v['enCap'] for v in d}

for vname in captions:
    video_path = os.path.join('test', vname + '.mp4')  # path to the video
    video_captions = captions[vname]  # a list of 10 str
```

### MSR-VTT

Each video contains 1 caption. There are two files for MSR-VTT:

* `MSRVTT.zip`: contains all videos
* `MSRVTT_JSFUSION_test.csv`: contains all captions

Example data loading:

```py
import os
import pandas as pd

path = 'MSRVTT_JSFUSION_test.csv'
df = pd.read_csv(path)
vid_id_list = df['video_id'].tolist()
caption_list = df['sentence'].tolist()

for vid_id, caption in zip(vid_id_list, caption_list):
    video_path = os.path.join('MSRVTT', 'videos', 'all', vid_id + '.mp4')  # path to the video
    captions = [caption]  # a list of 1 str
```
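Neither loader above actually decodes the videos. Below is a minimal sketch of sampling frames from one of the resolved `video_path` values; it assumes `opencv-python` is installed (not a requirement of this dataset), and the helper name `sample_frames` and the default frame count are illustrative only:

```py
import cv2  # assumption: pip install opencv-python

def sample_frames(video_path, num_frames=8):
    """Uniformly sample up to num_frames RGB frames from a video file."""
    cap = cv2.VideoCapture(video_path)
    total = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
    stride = max(total // num_frames, 1)
    frames = []
    for i in range(num_frames):
        cap.set(cv2.CAP_PROP_POS_FRAMES, i * stride)  # seek to the i-th sample point
        ok, frame = cap.read()
        if not ok:
            break
        frames.append(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))  # OpenCV decodes as BGR
    cap.release()
    return frames

# e.g., inside either loop above: frames = sample_frames(video_path)
```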
wufeim/aug_text
[ "region:us" ]
2023-08-18T17:38:13+00:00
{}
2023-08-30T04:04:49+00:00
[]
[]
TAGS #region-us
## Video Augmented Texts Data ### VATEX Each video contains 10 captions. In 'URL', there are: * 'test/': a folder containing all available videos * 'vatex_public_test_english_v1.1.json': JSON file containing all captions Example data loading: ### MSR-VTT Each video contains 1 caption. There are two files for MSR-VTT: * 'URL': contains all videos * 'MSRVTT_JSFUSION_test.csv': contains all captions Example data loading:
[ "## Video Augmented Texts Data", "### VATEX\n\nEach video contains 10 captions. In 'URL', there are:\n\n* 'test/': a folder containing all available videos\n* 'vatex_public_test_english_v1.1.json': JSON file containing all captions\n\nExample data loading:", "### MSR-VTT\n\nEach video contains 1 caption. There are two files for MSR-VTT:\n\n* 'URL': contains all videos\n* 'MSRVTT_JSFUSION_test.csv': contains all captions\n\nExample data loading:" ]
[ "TAGS\n#region-us \n", "## Video Augmented Texts Data", "### VATEX\n\nEach video contains 10 captions. In 'URL', there are:\n\n* 'test/': a folder containing all available videos\n* 'vatex_public_test_english_v1.1.json': JSON file containing all captions\n\nExample data loading:", "### MSR-VTT\n\nEach video contains 1 caption. There are two files for MSR-VTT:\n\n* 'URL': contains all videos\n* 'MSRVTT_JSFUSION_test.csv': contains all captions\n\nExample data loading:" ]
[ 6, 8, 66, 61 ]
[ "passage: TAGS\n#region-us \n## Video Augmented Texts Data### VATEX\n\nEach video contains 10 captions. In 'URL', there are:\n\n* 'test/': a folder containing all available videos\n* 'vatex_public_test_english_v1.1.json': JSON file containing all captions\n\nExample data loading:### MSR-VTT\n\nEach video contains 1 caption. There are two files for MSR-VTT:\n\n* 'URL': contains all videos\n* 'MSRVTT_JSFUSION_test.csv': contains all captions\n\nExample data loading:" ]
225b57be816704d78076b9c57ae3f5ea56e05b8c
# Dataset Card for Evaluation run of Kunhao/pile-7b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Kunhao/pile-7b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [Kunhao/pile-7b](https://huggingface.co/Kunhao/pile-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Kunhao__pile-7b", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-17T14:02:00.215909](https://huggingface.co/datasets/open-llm-leaderboard/details_Kunhao__pile-7b/blob/main/results_2023-08-17T14%3A02%3A00.215909.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.26607314141949256, "acc_stderr": 0.031950603341667064, "acc_norm": 0.2676071883857905, "acc_norm_stderr": 0.03196207703098002, "mc1": 0.23378212974296206, "mc1_stderr": 0.014816195991931572, "mc2": 0.4240744665255174, "mc2_stderr": 0.014948776413812296 }, "harness|arc:challenge|25": { "acc": 0.2380546075085324, "acc_stderr": 0.012445770028026203, "acc_norm": 0.26791808873720135, "acc_norm_stderr": 0.01294203019513643 }, "harness|hellaswag|10": { "acc": 0.3269269069906393, "acc_stderr": 0.004681316064444439, "acc_norm": 0.3875721967735511, "acc_norm_stderr": 0.004862003566798543 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.24, "acc_stderr": 0.04292346959909283, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.23703703703703705, "acc_stderr": 0.03673731683969506, "acc_norm": 0.23703703703703705, "acc_norm_stderr": 0.03673731683969506 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.2631578947368421, "acc_stderr": 0.035834961763610625, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.035834961763610625 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21132075471698114, "acc_stderr": 0.025125766484827842, "acc_norm": 0.21132075471698114, "acc_norm_stderr": 0.025125766484827842 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.20833333333333334, "acc_stderr": 0.033961162058453336, "acc_norm": 0.20833333333333334, "acc_norm_stderr": 0.033961162058453336 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, 
"harness|hendrycksTest-college_computer_science|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.24277456647398843, "acc_stderr": 0.0326926380614177, "acc_norm": 0.24277456647398843, "acc_norm_stderr": 0.0326926380614177 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.28431372549019607, "acc_stderr": 0.04488482852329017, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.04488482852329017 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.22, "acc_stderr": 0.0416333199893227, "acc_norm": 0.22, "acc_norm_stderr": 0.0416333199893227 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.23404255319148937, "acc_stderr": 0.02767845257821239, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.02767845257821239 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813344, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813344 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.25517241379310346, "acc_stderr": 0.03632984052707841, "acc_norm": 0.25517241379310346, "acc_norm_stderr": 0.03632984052707841 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.25132275132275134, "acc_stderr": 0.022340482339643898, "acc_norm": 0.25132275132275134, "acc_norm_stderr": 0.022340482339643898 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.20634920634920634, "acc_stderr": 0.036196045241242515, "acc_norm": 0.20634920634920634, "acc_norm_stderr": 0.036196045241242515 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.27741935483870966, "acc_stderr": 0.025470196835900055, "acc_norm": 0.27741935483870966, "acc_norm_stderr": 0.025470196835900055 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.24630541871921183, "acc_stderr": 0.030315099285617722, "acc_norm": 0.24630541871921183, "acc_norm_stderr": 0.030315099285617722 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.2545454545454545, "acc_stderr": 0.0340150671524904, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.0340150671524904 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.35858585858585856, "acc_stderr": 0.03416903640391521, "acc_norm": 0.35858585858585856, "acc_norm_stderr": 0.03416903640391521 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.36787564766839376, "acc_stderr": 0.03480175668466036, "acc_norm": 0.36787564766839376, "acc_norm_stderr": 0.03480175668466036 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.31025641025641026, "acc_stderr": 0.023454674889404288, "acc_norm": 0.31025641025641026, "acc_norm_stderr": 0.023454674889404288 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.028742040903948492, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.028742040903948492 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.2184873949579832, "acc_stderr": 
0.026841514322958948, "acc_norm": 0.2184873949579832, "acc_norm_stderr": 0.026841514322958948 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.271523178807947, "acc_stderr": 0.036313298039696525, "acc_norm": 0.271523178807947, "acc_norm_stderr": 0.036313298039696525 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.25871559633027524, "acc_stderr": 0.018776052319619624, "acc_norm": 0.25871559633027524, "acc_norm_stderr": 0.018776052319619624 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.46296296296296297, "acc_stderr": 0.03400603625538272, "acc_norm": 0.46296296296296297, "acc_norm_stderr": 0.03400603625538272 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.27941176470588236, "acc_stderr": 0.031493281045079556, "acc_norm": 0.27941176470588236, "acc_norm_stderr": 0.031493281045079556 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.2109704641350211, "acc_stderr": 0.026558372502661923, "acc_norm": 0.2109704641350211, "acc_norm_stderr": 0.026558372502661923 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.28699551569506726, "acc_stderr": 0.030360379710291954, "acc_norm": 0.28699551569506726, "acc_norm_stderr": 0.030360379710291954 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.25190839694656486, "acc_stderr": 0.03807387116306085, "acc_norm": 0.25190839694656486, "acc_norm_stderr": 0.03807387116306085 }, "harness|hendrycksTest-international_law|5": { "acc": 0.256198347107438, "acc_stderr": 0.039849796533028725, "acc_norm": 0.256198347107438, "acc_norm_stderr": 0.039849796533028725 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.2222222222222222, "acc_stderr": 0.040191074725573483, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.040191074725573483 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22699386503067484, "acc_stderr": 0.03291099578615769, "acc_norm": 0.22699386503067484, "acc_norm_stderr": 0.03291099578615769 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.2857142857142857, "acc_stderr": 0.042878587513404565, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.042878587513404565 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.1794871794871795, "acc_stderr": 0.02514093595033545, "acc_norm": 0.1794871794871795, "acc_norm_stderr": 0.02514093595033545 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.24648786717752236, "acc_stderr": 0.015411308769686941, "acc_norm": 0.24648786717752236, "acc_norm_stderr": 0.015411308769686941 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.25722543352601157, "acc_stderr": 0.02353292543104428, "acc_norm": 0.25722543352601157, "acc_norm_stderr": 0.02353292543104428 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23687150837988827, "acc_stderr": 0.01421957078810398, "acc_norm": 0.23687150837988827, "acc_norm_stderr": 0.01421957078810398 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.2549019607843137, "acc_stderr": 0.02495418432487991, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.02495418432487991 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.24115755627009647, "acc_stderr": 0.024296594034763426, "acc_norm": 0.24115755627009647, "acc_norm_stderr": 0.024296594034763426 
}, "harness|hendrycksTest-prehistory|5": { "acc": 0.24074074074074073, "acc_stderr": 0.023788583551658544, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.023788583551658544 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.24822695035460993, "acc_stderr": 0.025770015644290413, "acc_norm": 0.24822695035460993, "acc_norm_stderr": 0.025770015644290413 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.24902216427640156, "acc_stderr": 0.011044892264040772, "acc_norm": 0.24902216427640156, "acc_norm_stderr": 0.011044892264040772 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4227941176470588, "acc_stderr": 0.03000856284500347, "acc_norm": 0.4227941176470588, "acc_norm_stderr": 0.03000856284500347 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.24673202614379086, "acc_stderr": 0.0174408203674025, "acc_norm": 0.24673202614379086, "acc_norm_stderr": 0.0174408203674025 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.19090909090909092, "acc_stderr": 0.03764425585984924, "acc_norm": 0.19090909090909092, "acc_norm_stderr": 0.03764425585984924 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.3877551020408163, "acc_stderr": 0.031192230726795656, "acc_norm": 0.3877551020408163, "acc_norm_stderr": 0.031192230726795656 }, "harness|hendrycksTest-sociology|5": { "acc": 0.25870646766169153, "acc_stderr": 0.030965903123573026, "acc_norm": 0.25870646766169153, "acc_norm_stderr": 0.030965903123573026 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-virology|5": { "acc": 0.2891566265060241, "acc_stderr": 0.03529486801511115, "acc_norm": 0.2891566265060241, "acc_norm_stderr": 0.03529486801511115 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.2573099415204678, "acc_stderr": 0.03352799844161865, "acc_norm": 0.2573099415204678, "acc_norm_stderr": 0.03352799844161865 }, "harness|truthfulqa:mc|0": { "mc1": 0.23378212974296206, "mc1_stderr": 0.014816195991931572, "mc2": 0.4240744665255174, "mc2_stderr": 0.014948776413812296 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_Kunhao__pile-7b
[ "region:us" ]
2023-08-18T17:39:47+00:00
{"pretty_name": "Evaluation run of Kunhao/pile-7b", "dataset_summary": "Dataset automatically created during the evaluation run of model [Kunhao/pile-7b](https://huggingface.co/Kunhao/pile-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Kunhao__pile-7b\",\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-08-17T14:02:00.215909](https://huggingface.co/datasets/open-llm-leaderboard/details_Kunhao__pile-7b/blob/main/results_2023-08-17T14%3A02%3A00.215909.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.26607314141949256,\n \"acc_stderr\": 0.031950603341667064,\n \"acc_norm\": 0.2676071883857905,\n \"acc_norm_stderr\": 0.03196207703098002,\n \"mc1\": 0.23378212974296206,\n \"mc1_stderr\": 0.014816195991931572,\n \"mc2\": 0.4240744665255174,\n \"mc2_stderr\": 0.014948776413812296\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.2380546075085324,\n \"acc_stderr\": 0.012445770028026203,\n \"acc_norm\": 0.26791808873720135,\n \"acc_norm_stderr\": 0.01294203019513643\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.3269269069906393,\n \"acc_stderr\": 0.004681316064444439,\n \"acc_norm\": 0.3875721967735511,\n \"acc_norm_stderr\": 0.004862003566798543\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.23703703703703705,\n \"acc_stderr\": 0.03673731683969506,\n \"acc_norm\": 0.23703703703703705,\n \"acc_norm_stderr\": 0.03673731683969506\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.2631578947368421,\n \"acc_stderr\": 0.035834961763610625,\n \"acc_norm\": 0.2631578947368421,\n \"acc_norm_stderr\": 0.035834961763610625\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.21132075471698114,\n \"acc_stderr\": 0.025125766484827842,\n \"acc_norm\": 0.21132075471698114,\n \"acc_norm_stderr\": 0.025125766484827842\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.20833333333333334,\n \"acc_stderr\": 0.033961162058453336,\n \"acc_norm\": 0.20833333333333334,\n \"acc_norm_stderr\": 0.033961162058453336\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 
0.04351941398892446\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.24277456647398843,\n \"acc_stderr\": 0.0326926380614177,\n \"acc_norm\": 0.24277456647398843,\n \"acc_norm_stderr\": 0.0326926380614177\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.28431372549019607,\n \"acc_stderr\": 0.04488482852329017,\n \"acc_norm\": 0.28431372549019607,\n \"acc_norm_stderr\": 0.04488482852329017\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.0416333199893227,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.0416333199893227\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.23404255319148937,\n \"acc_stderr\": 0.02767845257821239,\n \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.02767845257821239\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.039994238792813344,\n \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.039994238792813344\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.25517241379310346,\n \"acc_stderr\": 0.03632984052707841,\n \"acc_norm\": 0.25517241379310346,\n \"acc_norm_stderr\": 0.03632984052707841\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.25132275132275134,\n \"acc_stderr\": 0.022340482339643898,\n \"acc_norm\": 0.25132275132275134,\n \"acc_norm_stderr\": 0.022340482339643898\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.20634920634920634,\n \"acc_stderr\": 0.036196045241242515,\n \"acc_norm\": 0.20634920634920634,\n \"acc_norm_stderr\": 0.036196045241242515\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.27741935483870966,\n \"acc_stderr\": 0.025470196835900055,\n \"acc_norm\": 0.27741935483870966,\n \"acc_norm_stderr\": 0.025470196835900055\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.24630541871921183,\n \"acc_stderr\": 0.030315099285617722,\n \"acc_norm\": 0.24630541871921183,\n \"acc_norm_stderr\": 0.030315099285617722\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.2545454545454545,\n \"acc_stderr\": 0.0340150671524904,\n \"acc_norm\": 0.2545454545454545,\n \"acc_norm_stderr\": 0.0340150671524904\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.35858585858585856,\n \"acc_stderr\": 0.03416903640391521,\n \"acc_norm\": 0.35858585858585856,\n \"acc_norm_stderr\": 0.03416903640391521\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.36787564766839376,\n \"acc_stderr\": 0.03480175668466036,\n \"acc_norm\": 0.36787564766839376,\n \"acc_norm_stderr\": 0.03480175668466036\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.31025641025641026,\n 
\"acc_stderr\": 0.023454674889404288,\n \"acc_norm\": 0.31025641025641026,\n \"acc_norm_stderr\": 0.023454674889404288\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.028742040903948492,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.028742040903948492\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.2184873949579832,\n \"acc_stderr\": 0.026841514322958948,\n \"acc_norm\": 0.2184873949579832,\n \"acc_norm_stderr\": 0.026841514322958948\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.271523178807947,\n \"acc_stderr\": 0.036313298039696525,\n \"acc_norm\": 0.271523178807947,\n \"acc_norm_stderr\": 0.036313298039696525\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.25871559633027524,\n \"acc_stderr\": 0.018776052319619624,\n \"acc_norm\": 0.25871559633027524,\n \"acc_norm_stderr\": 0.018776052319619624\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.46296296296296297,\n \"acc_stderr\": 0.03400603625538272,\n \"acc_norm\": 0.46296296296296297,\n \"acc_norm_stderr\": 0.03400603625538272\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.27941176470588236,\n \"acc_stderr\": 0.031493281045079556,\n \"acc_norm\": 0.27941176470588236,\n \"acc_norm_stderr\": 0.031493281045079556\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.2109704641350211,\n \"acc_stderr\": 0.026558372502661923,\n \"acc_norm\": 0.2109704641350211,\n \"acc_norm_stderr\": 0.026558372502661923\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.28699551569506726,\n \"acc_stderr\": 0.030360379710291954,\n \"acc_norm\": 0.28699551569506726,\n \"acc_norm_stderr\": 0.030360379710291954\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.25190839694656486,\n \"acc_stderr\": 0.03807387116306085,\n \"acc_norm\": 0.25190839694656486,\n \"acc_norm_stderr\": 0.03807387116306085\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.256198347107438,\n \"acc_stderr\": 0.039849796533028725,\n \"acc_norm\": 0.256198347107438,\n \"acc_norm_stderr\": 0.039849796533028725\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.040191074725573483,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.040191074725573483\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22699386503067484,\n \"acc_stderr\": 0.03291099578615769,\n \"acc_norm\": 0.22699386503067484,\n \"acc_norm_stderr\": 0.03291099578615769\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.042878587513404565,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.042878587513404565\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.1794871794871795,\n \"acc_stderr\": 0.02514093595033545,\n \"acc_norm\": 0.1794871794871795,\n \"acc_norm_stderr\": 0.02514093595033545\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.24648786717752236,\n \"acc_stderr\": 0.015411308769686941,\n \"acc_norm\": 
0.24648786717752236,\n \"acc_norm_stderr\": 0.015411308769686941\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.25722543352601157,\n \"acc_stderr\": 0.02353292543104428,\n \"acc_norm\": 0.25722543352601157,\n \"acc_norm_stderr\": 0.02353292543104428\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23687150837988827,\n \"acc_stderr\": 0.01421957078810398,\n \"acc_norm\": 0.23687150837988827,\n \"acc_norm_stderr\": 0.01421957078810398\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.02495418432487991,\n \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.02495418432487991\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.24115755627009647,\n \"acc_stderr\": 0.024296594034763426,\n \"acc_norm\": 0.24115755627009647,\n \"acc_norm_stderr\": 0.024296594034763426\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.24074074074074073,\n \"acc_stderr\": 0.023788583551658544,\n \"acc_norm\": 0.24074074074074073,\n \"acc_norm_stderr\": 0.023788583551658544\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.24822695035460993,\n \"acc_stderr\": 0.025770015644290413,\n \"acc_norm\": 0.24822695035460993,\n \"acc_norm_stderr\": 0.025770015644290413\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.24902216427640156,\n \"acc_stderr\": 0.011044892264040772,\n \"acc_norm\": 0.24902216427640156,\n \"acc_norm_stderr\": 0.011044892264040772\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.4227941176470588,\n \"acc_stderr\": 0.03000856284500347,\n \"acc_norm\": 0.4227941176470588,\n \"acc_norm_stderr\": 0.03000856284500347\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.24673202614379086,\n \"acc_stderr\": 0.0174408203674025,\n \"acc_norm\": 0.24673202614379086,\n \"acc_norm_stderr\": 0.0174408203674025\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.19090909090909092,\n \"acc_stderr\": 0.03764425585984924,\n \"acc_norm\": 0.19090909090909092,\n \"acc_norm_stderr\": 0.03764425585984924\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.3877551020408163,\n \"acc_stderr\": 0.031192230726795656,\n \"acc_norm\": 0.3877551020408163,\n \"acc_norm_stderr\": 0.031192230726795656\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.25870646766169153,\n \"acc_stderr\": 0.030965903123573026,\n \"acc_norm\": 0.25870646766169153,\n \"acc_norm_stderr\": 0.030965903123573026\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.2891566265060241,\n \"acc_stderr\": 0.03529486801511115,\n \"acc_norm\": 0.2891566265060241,\n \"acc_norm_stderr\": 0.03529486801511115\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.2573099415204678,\n \"acc_stderr\": 0.03352799844161865,\n \"acc_norm\": 0.2573099415204678,\n \"acc_norm_stderr\": 0.03352799844161865\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.23378212974296206,\n \"mc1_stderr\": 0.014816195991931572,\n \"mc2\": 0.4240744665255174,\n \"mc2_stderr\": 0.014948776413812296\n }\n}\n```", "repo_url": "https://huggingface.co/Kunhao/pile-7b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": 
[{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|arc:challenge|25_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hellaswag|10_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:02:00.215909.parquet", 
"**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:02:00.215909.parquet", 
"**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:02:00.215909.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T14:02:00.215909.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T14:02:00.215909.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T14_02_00.215909", "path": ["results_2023-08-17T14:02:00.215909.parquet"]}, {"split": "latest", "path": ["results_2023-08-17T14:02:00.215909.parquet"]}]}]}
2023-08-27T11:40:00+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Kunhao/pile-7b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model Kunhao/pile-7b on the Open LLM Leaderboard. The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-08-17T14:02:00.215909 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
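The snippet referenced by "do the following:" above was stripped from this plain-text rendering of the card; as recorded in the repository metadata, it is:

```python
from datasets import load_dataset

# Load the TruthfulQA details from this evaluation run.
data = load_dataset(
    "open-llm-leaderboard/details_Kunhao__pile-7b",
    "harness_truthfulqa_mc_0",
    split="train",
)
```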
[ "# Dataset Card for Evaluation run of Kunhao/pile-7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Kunhao/pile-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-17T14:02:00.215909 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Kunhao/pile-7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Kunhao/pile-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-17T14:02:00.215909 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 17, 31, 165, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Kunhao/pile-7b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Kunhao/pile-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-08-17T14:02:00.215909 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
4c28c9e2b2191bed008d9efd2c777f8b070cab0d
# Dataset Card for Evaluation run of Kunhao/pile-7b-250b-tokens ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Kunhao/pile-7b-250b-tokens - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [Kunhao/pile-7b-250b-tokens](https://huggingface.co/Kunhao/pile-7b-250b-tokens) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Kunhao__pile-7b-250b-tokens", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-17T06:31:55.680940](https://huggingface.co/datasets/open-llm-leaderboard/details_Kunhao__pile-7b-250b-tokens/blob/main/results_2023-09-17T06-31-55.680940.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0016778523489932886, "em_stderr": 0.00041913301788268413, "f1": 0.02978817114093967, "f1_stderr": 0.0010045845151481873, "acc": 0.26666299658982046, "acc_stderr": 0.008015854967176925 }, "harness|drop|3": { "em": 0.0016778523489932886, "em_stderr": 0.00041913301788268413, "f1": 0.02978817114093967, "f1_stderr": 0.0010045845151481873 }, "harness|gsm8k|5": { "acc": 0.00530705079605762, "acc_stderr": 0.002001305720948061 }, "harness|winogrande|5": { "acc": 0.5280189423835833, "acc_stderr": 0.014030404213405788 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
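Since this dataset was created from 2 runs, each configuration also exposes per-run splits named after the run timestamp (punctuation normalized to underscores, e.g. `2023_09_17T06_31_55.680940` in the metadata below), alongside the `latest` alias. A hedged sketch of pinning one run versus following the latest; split availability should be checked against the repository:

```python
from datasets import load_dataset

# Pin the GSM8K details to a specific run via its timestamped split name...
pinned = load_dataset(
    "open-llm-leaderboard/details_Kunhao__pile-7b-250b-tokens",
    "harness_gsm8k_5",
    split="2023_09_17T06_31_55.680940",
)

# ...or track whichever run is most recent.
latest = load_dataset(
    "open-llm-leaderboard/details_Kunhao__pile-7b-250b-tokens",
    "harness_gsm8k_5",
    split="latest",
)

# Here both point at the same parquet file, since this run is currently the latest.
assert pinned.num_rows == latest.num_rows
```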
open-llm-leaderboard/details_Kunhao__pile-7b-250b-tokens
[ "region:us" ]
2023-08-18T17:39:56+00:00
{"pretty_name": "Evaluation run of Kunhao/pile-7b-250b-tokens", "dataset_summary": "Dataset automatically created during the evaluation run of model [Kunhao/pile-7b-250b-tokens](https://huggingface.co/Kunhao/pile-7b-250b-tokens) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Kunhao__pile-7b-250b-tokens\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-17T06:31:55.680940](https://huggingface.co/datasets/open-llm-leaderboard/details_Kunhao__pile-7b-250b-tokens/blob/main/results_2023-09-17T06-31-55.680940.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0016778523489932886,\n \"em_stderr\": 0.00041913301788268413,\n \"f1\": 0.02978817114093967,\n \"f1_stderr\": 0.0010045845151481873,\n \"acc\": 0.26666299658982046,\n \"acc_stderr\": 0.008015854967176925\n },\n \"harness|drop|3\": {\n \"em\": 0.0016778523489932886,\n \"em_stderr\": 0.00041913301788268413,\n \"f1\": 0.02978817114093967,\n \"f1_stderr\": 0.0010045845151481873\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.00530705079605762,\n \"acc_stderr\": 0.002001305720948061\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5280189423835833,\n \"acc_stderr\": 0.014030404213405788\n }\n}\n```", "repo_url": "https://huggingface.co/Kunhao/pile-7b-250b-tokens", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|arc:challenge|25_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_17T06_31_55.680940", "path": ["**/details_harness|drop|3_2023-09-17T06-31-55.680940.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-17T06-31-55.680940.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_17T06_31_55.680940", "path": ["**/details_harness|gsm8k|5_2023-09-17T06-31-55.680940.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-17T06-31-55.680940.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hellaswag|10_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", 
"data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T19:43:31.029227.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T19:43:31.029227.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T19:43:31.029227.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T19:43:31.029227.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T19:43:31.029227.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_17T06_31_55.680940", "path": ["**/details_harness|winogrande|5_2023-09-17T06-31-55.680940.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-17T06-31-55.680940.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T19_43_31.029227", "path": ["results_2023-08-17T19:43:31.029227.parquet"]}, {"split": "2023_09_17T06_31_55.680940", "path": ["results_2023-09-17T06-31-55.680940.parquet"]}, {"split": "latest", "path": ["results_2023-09-17T06-31-55.680940.parquet"]}]}]}
2023-09-17T05:32:07+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Kunhao/pile-7b-250b-tokens ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model Kunhao/pile-7b-250b-tokens on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-17T06:31:55.680940 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
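The passage above references a loading snippet ("you can for instance do the following:") without showing it; the snippet, as given in this record's metadata, is:

```python
from datasets import load_dataset

# Per-sample details for one evaluation task; the second argument must be
# one of the "harness_*" config names listed in the metadata above.
data = load_dataset("open-llm-leaderboard/details_Kunhao__pile-7b-250b-tokens",
	"harness_winogrande_5",
	split="train")
```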
[ "# Dataset Card for Evaluation run of Kunhao/pile-7b-250b-tokens", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Kunhao/pile-7b-250b-tokens on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T06:31:55.680940(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Kunhao/pile-7b-250b-tokens", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Kunhao/pile-7b-250b-tokens on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T06:31:55.680940(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 23, 31, 171, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Kunhao/pile-7b-250b-tokens## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Kunhao/pile-7b-250b-tokens on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-17T06:31:55.680940(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
735bca0f4932f884d071e92aec50c6240049d9f9
# Dataset Card for Evaluation run of Corianas/1.3b

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/Corianas/1.3b
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [Corianas/1.3b](https://huggingface.co/Corianas/1.3b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Corianas__1.3b",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-15T03:40:11.445495](https://huggingface.co/datasets/open-llm-leaderboard/details_Corianas__1.3b/blob/main/results_2023-10-15T03-40-11.445495.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0019924496644295304,
        "em_stderr": 0.00045666764626669994,
        "f1": 0.045740352348993464,
        "f1_stderr": 0.001213536763017523,
        "acc": 0.2659515202794684,
        "acc_stderr": 0.007549145093989003
    },
    "harness|drop|3": {
        "em": 0.0019924496644295304,
        "em_stderr": 0.00045666764626669994,
        "f1": 0.045740352348993464,
        "f1_stderr": 0.001213536763017523
    },
    "harness|gsm8k|5": {
        "acc": 0.001516300227445034,
        "acc_stderr": 0.0010717793485492606
    },
    "harness|winogrande|5": {
        "acc": 0.5303867403314917,
        "acc_stderr": 0.014026510839428746
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
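Beyond per-task details, the aggregated metrics shown under "Latest results" live in the "results" configuration; a small sketch, assuming the "results" config and its "latest" split follow the same pattern as in the configs metadata below:

```python
from datasets import load_dataset

# Aggregated metrics of the most recent run; "results"/"latest" follow the
# config/split naming pattern these evaluation repos use (an assumption
# here; the configs metadata below is the authoritative list).
results = load_dataset("open-llm-leaderboard/details_Corianas__1.3b",
	"results",
	split="latest")
print(results[0])
```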
open-llm-leaderboard/details_Corianas__1.3b
[ "region:us" ]
2023-08-18T17:40:05+00:00
{"pretty_name": "Evaluation run of Corianas/1.3b", "dataset_summary": "Dataset automatically created during the evaluation run of model [Corianas/1.3b](https://huggingface.co/Corianas/1.3b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Corianas__1.3b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-15T03:40:11.445495](https://huggingface.co/datasets/open-llm-leaderboard/details_Corianas__1.3b/blob/main/results_2023-10-15T03-40-11.445495.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0019924496644295304,\n \"em_stderr\": 0.00045666764626669994,\n \"f1\": 0.045740352348993464,\n \"f1_stderr\": 0.001213536763017523,\n \"acc\": 0.2659515202794684,\n \"acc_stderr\": 0.007549145093989003\n },\n \"harness|drop|3\": {\n \"em\": 0.0019924496644295304,\n \"em_stderr\": 0.00045666764626669994,\n \"f1\": 0.045740352348993464,\n \"f1_stderr\": 0.001213536763017523\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.001516300227445034,\n \"acc_stderr\": 0.0010717793485492606\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5303867403314917,\n \"acc_stderr\": 0.014026510839428746\n }\n}\n```", "repo_url": "https://huggingface.co/Corianas/1.3b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|arc:challenge|25_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_15T03_40_11.445495", "path": ["**/details_harness|drop|3_2023-10-15T03-40-11.445495.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-15T03-40-11.445495.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_15T03_40_11.445495", "path": ["**/details_harness|gsm8k|5_2023-10-15T03-40-11.445495.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-15T03-40-11.445495.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hellaswag|10_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T07:03:11.668296.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T07:03:11.668296.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T07:03:11.668296.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T07:03:11.668296.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T07:03:11.668296.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T07:03:11.668296.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_15T03_40_11.445495", "path": ["**/details_harness|winogrande|5_2023-10-15T03-40-11.445495.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-15T03-40-11.445495.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T07_03_11.668296", "path": ["results_2023-08-18T07:03:11.668296.parquet"]}, {"split": "2023_10_15T03_40_11.445495", "path": ["results_2023-10-15T03-40-11.445495.parquet"]}, {"split": "latest", "path": ["results_2023-10-15T03-40-11.445495.parquet"]}]}]}
2023-10-15T02:40:23+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Corianas/1.3b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model Corianas/1.3b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (a minimal sketch is shown after this card text): ## Latest results These are the latest results from run 2023-10-15T03:40:11.445495 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
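The loading call referenced in the summary above, as a minimal sketch. The repository id `open-llm-leaderboard/details_Corianas__1.3b` is an assumption inferred from the leaderboard's `details_<org>__<model>` naming convention (it is not spelled out in this card text); the `harness_winogrande_5` config name is taken from the metadata above.

```python
from datasets import load_dataset

# Repo id assumed from the leaderboard naming convention, not stated in this card.
data = load_dataset(
    "open-llm-leaderboard/details_Corianas__1.3b",
    "harness_winogrande_5",  # one of the 64 per-task configs listed in the metadata
    split="train",           # "train" always points to the latest run
)
```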
[ "# Dataset Card for Evaluation run of Corianas/1.3b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Corianas/1.3b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T03:40:11.445495(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Corianas/1.3b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Corianas/1.3b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T03:40:11.445495(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 15, 31, 163, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Corianas/1.3b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Corianas/1.3b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-15T03:40:11.445495(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
d2e7329ee6e3e923ea8a2899fe7df9467c56e914
# Dataset Card for Evaluation run of migtissera/Synthia-7B

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/migtissera/Synthia-7B
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [migtissera/Synthia-7B](https://huggingface.co/migtissera/Synthia-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_migtissera__Synthia-7B",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-15T06:07:54.738296](https://huggingface.co/datasets/open-llm-leaderboard/details_migtissera__Synthia-7B/blob/main/results_2023-10-15T06-07-54.738296.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.07151845637583892,
        "em_stderr": 0.00263897548039012,
        "f1": 0.14513737416107345,
        "f1_stderr": 0.0029452435334875074,
        "acc": 0.4043291747772373,
        "acc_stderr": 0.009561470405449964
    },
    "harness|drop|3": {
        "em": 0.07151845637583892,
        "em_stderr": 0.00263897548039012,
        "f1": 0.14513737416107345,
        "f1_stderr": 0.0029452435334875074
    },
    "harness|gsm8k|5": {
        "acc": 0.06595905989385899,
        "acc_stderr": 0.006836951192034222
    },
    "harness|winogrande|5": {
        "acc": 0.7426992896606156,
        "acc_stderr": 0.012285989618865708
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
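Beyond the per-task detail configs, the aggregated numbers shown under "Latest results" live in the "results" config; a short sketch, assuming only the config and split names that appear in this card's metadata:

```python
from datasets import load_dataset

# "latest" always points at the most recent evaluation run.
results = load_dataset(
    "open-llm-leaderboard/details_migtissera__Synthia-7B",
    "results",
    split="latest",
)
print(results[0])  # inspect the aggregated-metrics row
```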
open-llm-leaderboard/details_migtissera__Synthia-7B
[ "region:us" ]
2023-08-18T17:40:15+00:00
{"pretty_name": "Evaluation run of migtissera/Synthia-7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [migtissera/Synthia-7B](https://huggingface.co/migtissera/Synthia-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_migtissera__Synthia-7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-15T06:07:54.738296](https://huggingface.co/datasets/open-llm-leaderboard/details_migtissera__Synthia-7B/blob/main/results_2023-10-15T06-07-54.738296.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.07151845637583892,\n \"em_stderr\": 0.00263897548039012,\n \"f1\": 0.14513737416107345,\n \"f1_stderr\": 0.0029452435334875074,\n \"acc\": 0.4043291747772373,\n \"acc_stderr\": 0.009561470405449964\n },\n \"harness|drop|3\": {\n \"em\": 0.07151845637583892,\n \"em_stderr\": 0.00263897548039012,\n \"f1\": 0.14513737416107345,\n \"f1_stderr\": 0.0029452435334875074\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.06595905989385899,\n \"acc_stderr\": 0.006836951192034222\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7426992896606156,\n \"acc_stderr\": 0.012285989618865708\n }\n}\n```", "repo_url": "https://huggingface.co/migtissera/Synthia-7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|arc:challenge|25_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_15T06_07_54.738296", "path": ["**/details_harness|drop|3_2023-10-15T06-07-54.738296.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-15T06-07-54.738296.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_15T06_07_54.738296", "path": ["**/details_harness|gsm8k|5_2023-10-15T06-07-54.738296.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-15T06-07-54.738296.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hellaswag|10_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T17:21:07.158534.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T17:21:07.158534.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T17:21:07.158534.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T17:21:07.158534.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T17:21:07.158534.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_15T06_07_54.738296", "path": ["**/details_harness|winogrande|5_2023-10-15T06-07-54.738296.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-15T06-07-54.738296.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T17_21_07.158534", "path": ["results_2023-08-17T17:21:07.158534.parquet"]}, {"split": "2023_10_15T06_07_54.738296", "path": ["results_2023-10-15T06-07-54.738296.parquet"]}, {"split": "latest", "path": ["results_2023-10-15T06-07-54.738296.parquet"]}]}]}
2023-10-15T05:08:06+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of migtissera/Synthia-7B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model migtissera/Synthia-7B on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-15T06:07:54.738296 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
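The loading snippet referenced just above ("you can for instance do the following") does not survive in this plain-text rendering of the card. A minimal reconstruction, assuming the repository follows the same `details_<org>__<model>` naming as the other records here and the `harness_winogrande_5` config listed in this record's metadata:

```python
from datasets import load_dataset

# Reconstructed sketch, not the stripped original: the repo id and config name
# are inferred from this record's metadata; "train" tracks the latest run.
data = load_dataset("open-llm-leaderboard/details_migtissera__Synthia-7B",
                    "harness_winogrande_5",
                    split="train")
```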
[ "# Dataset Card for Evaluation run of migtissera/Synthia-7B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model migtissera/Synthia-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T06:07:54.738296(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of migtissera/Synthia-7B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model migtissera/Synthia-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T06:07:54.738296(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 19, 31, 167, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of migtissera/Synthia-7B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model migtissera/Synthia-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-15T06:07:54.738296(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
657ef4b04f44bbb977fe33ce6d0dacaa3c4988fe
# Dataset Card for Evaluation run of migtissera/Synthia-13B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/migtissera/Synthia-13B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [migtissera/Synthia-13B](https://huggingface.co/migtissera/Synthia-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_migtissera__Synthia-13B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-15T08:11:39.705325](https://huggingface.co/datasets/open-llm-leaderboard/details_migtissera__Synthia-13B/blob/main/results_2023-10-15T08-11-39.705325.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.007130872483221477, "em_stderr": 0.0008617017796718602, "f1": 0.07447776845637605, "f1_stderr": 0.0016286126770648315, "acc": 0.435392086875506, "acc_stderr": 0.010302368716354655 }, "harness|drop|3": { "em": 0.007130872483221477, "em_stderr": 0.0008617017796718602, "f1": 0.07447776845637605, "f1_stderr": 0.0016286126770648315 }, "harness|gsm8k|5": { "acc": 0.10993176648976498, "acc_stderr": 0.008616195587865404 }, "harness|winogrande|5": { "acc": 0.760852407261247, "acc_stderr": 0.011988541844843905 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
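The card's own snippet pins the "train" split; as a complement, a short sketch of pulling the aggregated "results" configuration and of pinning a task to one specific run. Both config names and the timestamped split name below appear verbatim in this record's metadata:

```python
from datasets import load_dataset

# Aggregated metrics: the "results" config keeps one split per run plus "latest".
results = load_dataset("open-llm-leaderboard/details_migtissera__Synthia-13B",
                       "results",
                       split="latest")

# A per-task config can be pinned to a single run via its timestamped split name.
winogrande = load_dataset("open-llm-leaderboard/details_migtissera__Synthia-13B",
                          "harness_winogrande_5",
                          split="2023_10_15T08_11_39.705325")
```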
open-llm-leaderboard/details_migtissera__Synthia-13B
[ "region:us" ]
2023-08-18T17:40:23+00:00
{"pretty_name": "Evaluation run of migtissera/Synthia-13B", "dataset_summary": "Dataset automatically created during the evaluation run of model [migtissera/Synthia-13B](https://huggingface.co/migtissera/Synthia-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_migtissera__Synthia-13B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-15T08:11:39.705325](https://huggingface.co/datasets/open-llm-leaderboard/details_migtissera__Synthia-13B/blob/main/results_2023-10-15T08-11-39.705325.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.007130872483221477,\n \"em_stderr\": 0.0008617017796718602,\n \"f1\": 0.07447776845637605,\n \"f1_stderr\": 0.0016286126770648315,\n \"acc\": 0.435392086875506,\n \"acc_stderr\": 0.010302368716354655\n },\n \"harness|drop|3\": {\n \"em\": 0.007130872483221477,\n \"em_stderr\": 0.0008617017796718602,\n \"f1\": 0.07447776845637605,\n \"f1_stderr\": 0.0016286126770648315\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.10993176648976498,\n \"acc_stderr\": 0.008616195587865404\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.760852407261247,\n \"acc_stderr\": 0.011988541844843905\n }\n}\n```", "repo_url": "https://huggingface.co/migtissera/Synthia-13B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|arc:challenge|25_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_15T08_11_39.705325", "path": ["**/details_harness|drop|3_2023-10-15T08-11-39.705325.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-15T08-11-39.705325.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_15T08_11_39.705325", "path": ["**/details_harness|gsm8k|5_2023-10-15T08-11-39.705325.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-15T08-11-39.705325.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hellaswag|10_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T07:48:14.366837.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T07:48:14.366837.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T07:48:14.366837.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T07:48:14.366837.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T07:48:14.366837.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_15T08_11_39.705325", "path": ["**/details_harness|winogrande|5_2023-10-15T08-11-39.705325.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-15T08-11-39.705325.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T07_48_14.366837", "path": ["results_2023-08-18T07:48:14.366837.parquet"]}, {"split": "2023_10_15T08_11_39.705325", "path": ["results_2023-10-15T08-11-39.705325.parquet"]}, {"split": "latest", "path": ["results_2023-10-15T08-11-39.705325.parquet"]}]}]}
2023-10-15T07:11:52+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of migtissera/Synthia-13B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model migtissera/Synthia-13B on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-15T08:11:39.705325 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
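The code block is again absent from this rendering; rather than repeat the load call shown in the full card above, here is a small sketch of enumerating the 64 configurations the summary mentions, using the `datasets` library's config-discovery helper (assumes network access to the Hub):

```python
from datasets import get_dataset_config_names

# One config per evaluated task, plus the aggregated "results" config;
# the card's summary puts the total at 64.
configs = get_dataset_config_names("open-llm-leaderboard/details_migtissera__Synthia-13B")
print(len(configs))
print(sorted(configs)[:5])
```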
[ "# Dataset Card for Evaluation run of migtissera/Synthia-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model migtissera/Synthia-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T08:11:39.705325(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of migtissera/Synthia-13B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model migtissera/Synthia-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T08:11:39.705325(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 19, 31, 167, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of migtissera/Synthia-13B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model migtissera/Synthia-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-15T08:11:39.705325(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
f1ec67869ade53b43b0915e5d17ae1efee6648aa
# Dataset Card for Evaluation run of augtoma/qCammel-70x ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/augtoma/qCammel-70x - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [augtoma/qCammel-70x](https://huggingface.co/augtoma/qCammel-70x) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_augtoma__qCammel-70x", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-24T00:38:03.634221](https://huggingface.co/datasets/open-llm-leaderboard/details_augtoma__qCammel-70x/blob/main/results_2023-09-24T00-38-03.634221.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.033766778523489936, "em_stderr": 0.001849802869119515, "f1": 0.10340918624161041, "f1_stderr": 0.0022106009828094797, "acc": 0.5700654570173166, "acc_stderr": 0.011407494958111332 }, "harness|drop|3": { "em": 0.033766778523489936, "em_stderr": 0.001849802869119515, "f1": 0.10340918624161041, "f1_stderr": 0.0022106009828094797 }, "harness|gsm8k|5": { "acc": 0.2971948445792267, "acc_stderr": 0.012588685966624186 }, "harness|winogrande|5": { "acc": 0.8429360694554064, "acc_stderr": 0.010226303949598479 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
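Beyond the card's winogrande example, a minimal sketch of reading one task's details for the newest run; the `harness_gsm8k_5` config and its "latest" split are taken from this record's metadata, where "latest" mirrors the most recent timestamped split:

```python
from datasets import load_dataset

# Per-eval details of the newest run for this model.
gsm8k_details = load_dataset("open-llm-leaderboard/details_augtoma__qCammel-70x",
                             "harness_gsm8k_5",
                             split="latest")
print(gsm8k_details)
```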
open-llm-leaderboard/details_augtoma__qCammel-70x
[ "region:us" ]
2023-08-18T17:40:36+00:00
{"pretty_name": "Evaluation run of augtoma/qCammel-70x", "dataset_summary": "Dataset automatically created during the evaluation run of model [augtoma/qCammel-70x](https://huggingface.co/augtoma/qCammel-70x) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_augtoma__qCammel-70x\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-24T00:38:03.634221](https://huggingface.co/datasets/open-llm-leaderboard/details_augtoma__qCammel-70x/blob/main/results_2023-09-24T00-38-03.634221.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.033766778523489936,\n \"em_stderr\": 0.001849802869119515,\n \"f1\": 0.10340918624161041,\n \"f1_stderr\": 0.0022106009828094797,\n \"acc\": 0.5700654570173166,\n \"acc_stderr\": 0.011407494958111332\n },\n \"harness|drop|3\": {\n \"em\": 0.033766778523489936,\n \"em_stderr\": 0.001849802869119515,\n \"f1\": 0.10340918624161041,\n \"f1_stderr\": 0.0022106009828094797\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.2971948445792267,\n \"acc_stderr\": 0.012588685966624186\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8429360694554064,\n \"acc_stderr\": 0.010226303949598479\n }\n}\n```", "repo_url": "https://huggingface.co/augtoma/qCammel-70x", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|arc:challenge|25_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_24T00_38_03.634221", "path": ["**/details_harness|drop|3_2023-09-24T00-38-03.634221.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-24T00-38-03.634221.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_24T00_38_03.634221", "path": ["**/details_harness|gsm8k|5_2023-09-24T00-38-03.634221.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-24T00-38-03.634221.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hellaswag|10_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", 
"path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T05:27:12.496393.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T05:27:12.496393.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T05:27:12.496393.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T05:27:12.496393.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T05:27:12.496393.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T05:27:12.496393.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_24T00_38_03.634221", "path": ["**/details_harness|winogrande|5_2023-09-24T00-38-03.634221.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-24T00-38-03.634221.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T05_27_12.496393", "path": ["results_2023-08-18T05:27:12.496393.parquet"]}, {"split": "2023_09_24T00_38_03.634221", "path": ["results_2023-09-24T00-38-03.634221.parquet"]}, {"split": "latest", "path": ["results_2023-09-24T00-38-03.634221.parquet"]}]}]}
2023-09-23T23:38:15+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of augtoma/qCammel-70x ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model augtoma/qCammel-70x on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-24T00:38:03.634221 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of augtoma/qCammel-70x", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model augtoma/qCammel-70x on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-24T00:38:03.634221(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of augtoma/qCammel-70x", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model augtoma/qCammel-70x on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-24T00:38:03.634221(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 17, 31, 165, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of augtoma/qCammel-70x## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model augtoma/qCammel-70x on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-24T00:38:03.634221(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
41970a472afc72b817a31e9b34a4c90fb4c2fffe
# Dataset Card for Evaluation run of augtoma/qCammel70

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/augtoma/qCammel70
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [augtoma/qCammel70](https://huggingface.co/augtoma/qCammel70) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_augtoma__qCammel70",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-17T14:19:52.424228](https://huggingface.co/datasets/open-llm-leaderboard/details_augtoma__qCammel70/blob/main/results_2023-10-17T14-19-52.424228.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.033766778523489936,
        "em_stderr": 0.001849802869119515,
        "f1": 0.10340918624161041,
        "f1_stderr": 0.0022106009828094797,
        "acc": 0.5700654570173166,
        "acc_stderr": 0.011407494958111332
    },
    "harness|drop|3": {
        "em": 0.033766778523489936,
        "em_stderr": 0.001849802869119515,
        "f1": 0.10340918624161041,
        "f1_stderr": 0.0022106009828094797
    },
    "harness|gsm8k|5": {
        "acc": 0.2971948445792267,
        "acc_stderr": 0.012588685966624186
    },
    "harness|winogrande|5": {
        "acc": 0.8429360694554064,
        "acc_stderr": 0.010226303949598479
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
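Per-task detail configs can be loaded the same way by name. A minimal sketch using the "harness_gsm8k_5" config and the "latest" split declared in this card's metadata:

```python
from datasets import load_dataset

# Per-example GSM8K details; "latest" resolves to the 2023-10-17 rerun
# per the configs declared in this card's metadata.
gsm8k = load_dataset(
    "open-llm-leaderboard/details_augtoma__qCammel70",
    "harness_gsm8k_5",
    split="latest",
)
print(len(gsm8k))  # number of evaluated GSM8K examples
```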
open-llm-leaderboard/details_augtoma__qCammel70
[ "region:us" ]
2023-08-18T17:40:45+00:00
{"pretty_name": "Evaluation run of augtoma/qCammel70", "dataset_summary": "Dataset automatically created during the evaluation run of model [augtoma/qCammel70](https://huggingface.co/augtoma/qCammel70) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_augtoma__qCammel70\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-17T14:19:52.424228](https://huggingface.co/datasets/open-llm-leaderboard/details_augtoma__qCammel70/blob/main/results_2023-10-17T14-19-52.424228.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.033766778523489936,\n \"em_stderr\": 0.001849802869119515,\n \"f1\": 0.10340918624161041,\n \"f1_stderr\": 0.0022106009828094797,\n \"acc\": 0.5700654570173166,\n \"acc_stderr\": 0.011407494958111332\n },\n \"harness|drop|3\": {\n \"em\": 0.033766778523489936,\n \"em_stderr\": 0.001849802869119515,\n \"f1\": 0.10340918624161041,\n \"f1_stderr\": 0.0022106009828094797\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.2971948445792267,\n \"acc_stderr\": 0.012588685966624186\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8429360694554064,\n \"acc_stderr\": 0.010226303949598479\n }\n}\n```", "repo_url": "https://huggingface.co/augtoma/qCammel70", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|arc:challenge|25_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_17T14_19_52.424228", "path": ["**/details_harness|drop|3_2023-10-17T14-19-52.424228.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-17T14-19-52.424228.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_17T14_19_52.424228", "path": ["**/details_harness|gsm8k|5_2023-10-17T14-19-52.424228.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-17T14-19-52.424228.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hellaswag|10_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T06:33:28.828480.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T06:33:28.828480.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T06:33:28.828480.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T06:33:28.828480.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T06:33:28.828480.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T06:33:28.828480.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_17T14_19_52.424228", "path": ["**/details_harness|winogrande|5_2023-10-17T14-19-52.424228.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-17T14-19-52.424228.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T06_33_28.828480", "path": ["results_2023-08-18T06:33:28.828480.parquet"]}, {"split": "2023_10_17T14_19_52.424228", "path": ["results_2023-10-17T14-19-52.424228.parquet"]}, {"split": "latest", "path": ["results_2023-10-17T14-19-52.424228.parquet"]}]}]}
2023-10-17T13:20:05+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of augtoma/qCammel70

## Dataset Description

- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model augtoma/qCammel70 on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following (see the sketch after this card):

## Latest results

These are the latest results from run 2023-10-17T14:19:52.424228 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
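The loading snippet referenced above was stripped from this plain-text rendering of the card. A minimal sketch of what it would look like, assuming the details repository follows the leaderboard's usual `open-llm-leaderboard/details_<org>__<model>` naming convention (the `harness_winogrande_5` config and its `latest` split are the ones listed in this record's metadata):

```python
from datasets import load_dataset

# Per-sample details of the latest winogrande run for augtoma/qCammel70.
# The repository id below is an assumption based on the leaderboard's naming
# scheme; the config and split names come from this record's metadata.
data = load_dataset(
    "open-llm-leaderboard/details_augtoma__qCammel70",
    "harness_winogrande_5",
    split="latest",
)
```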
[ "# Dataset Card for Evaluation run of augtoma/qCammel70", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model augtoma/qCammel70 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-17T14:19:52.424228(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of augtoma/qCammel70", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model augtoma/qCammel70 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-17T14:19:52.424228(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 16, 31, 164, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of augtoma/qCammel70## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model augtoma/qCammel70 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-17T14:19:52.424228(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
43b2316838fb01e1e03783a235f159a141c38142
# Dataset Card for Evaluation run of OpenBuddy/openbuddy-openllama-3b-v10-bf16

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/OpenBuddy/openbuddy-openllama-3b-v10-bf16
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [OpenBuddy/openbuddy-openllama-3b-v10-bf16](https://huggingface.co/OpenBuddy/openbuddy-openllama-3b-v10-bf16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). A short sketch showing how to load it is given after this card.

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_OpenBuddy__openbuddy-openllama-3b-v10-bf16",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-15T08:49:40.172924](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenBuddy__openbuddy-openllama-3b-v10-bf16/blob/main/results_2023-10-15T08-49-40.172924.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.05757130872483222,
        "em_stderr": 0.0023854315115358956,
        "f1": 0.10502097315436239,
        "f1_stderr": 0.002651285925411262,
        "acc": 0.30327051717566045,
        "acc_stderr": 0.008254166931468953
    },
    "harness|drop|3": {
        "em": 0.05757130872483222,
        "em_stderr": 0.0023854315115358956,
        "f1": 0.10502097315436239,
        "f1_stderr": 0.002651285925411262
    },
    "harness|gsm8k|5": {
        "acc": 0.009855951478392721,
        "acc_stderr": 0.0027210765770416608
    },
    "harness|winogrande|5": {
        "acc": 0.5966850828729282,
        "acc_stderr": 0.013787257285896245
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
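Since the summary above describes both timestamped splits and a "latest" alias, here is an illustrative sketch (not part of the original card) of loading the aggregated `results` configuration either way; the repository id, config name, and split names are taken verbatim from this record's metadata:

```python
from datasets import load_dataset

REPO = "open-llm-leaderboard/details_OpenBuddy__openbuddy-openllama-3b-v10-bf16"

# Aggregated metrics of the most recent evaluation run, via the "latest" alias.
latest = load_dataset(REPO, "results", split="latest")

# The same aggregated table for one specific run, addressed by its timestamped split name.
run = load_dataset(REPO, "results", split="2023_10_15T08_49_40.172924")
```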
open-llm-leaderboard/details_OpenBuddy__openbuddy-openllama-3b-v10-bf16
[ "region:us" ]
2023-08-18T17:40:57+00:00
{"pretty_name": "Evaluation run of OpenBuddy/openbuddy-openllama-3b-v10-bf16", "dataset_summary": "Dataset automatically created during the evaluation run of model [OpenBuddy/openbuddy-openllama-3b-v10-bf16](https://huggingface.co/OpenBuddy/openbuddy-openllama-3b-v10-bf16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_OpenBuddy__openbuddy-openllama-3b-v10-bf16\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-15T08:49:40.172924](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenBuddy__openbuddy-openllama-3b-v10-bf16/blob/main/results_2023-10-15T08-49-40.172924.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.05757130872483222,\n \"em_stderr\": 0.0023854315115358956,\n \"f1\": 0.10502097315436239,\n \"f1_stderr\": 0.002651285925411262,\n \"acc\": 0.30327051717566045,\n \"acc_stderr\": 0.008254166931468953\n },\n \"harness|drop|3\": {\n \"em\": 0.05757130872483222,\n \"em_stderr\": 0.0023854315115358956,\n \"f1\": 0.10502097315436239,\n \"f1_stderr\": 0.002651285925411262\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.009855951478392721,\n \"acc_stderr\": 0.0027210765770416608\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5966850828729282,\n \"acc_stderr\": 0.013787257285896245\n }\n}\n```", "repo_url": "https://huggingface.co/OpenBuddy/openbuddy-openllama-3b-v10-bf16", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|arc:challenge|25_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_15T08_49_40.172924", "path": ["**/details_harness|drop|3_2023-10-15T08-49-40.172924.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-15T08-49-40.172924.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_15T08_49_40.172924", "path": ["**/details_harness|gsm8k|5_2023-10-15T08-49-40.172924.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-15T08-49-40.172924.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hellaswag|10_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:16:36.275338.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:16:36.275338.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:16:36.275338.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T14:16:36.275338.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T14:16:36.275338.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T14:16:36.275338.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_15T08_49_40.172924", "path": ["**/details_harness|winogrande|5_2023-10-15T08-49-40.172924.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-15T08-49-40.172924.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T14_16_36.275338", "path": ["results_2023-08-17T14:16:36.275338.parquet"]}, {"split": "2023_10_15T08_49_40.172924", "path": ["results_2023-10-15T08-49-40.172924.parquet"]}, {"split": "latest", "path": ["results_2023-10-15T08-49-40.172924.parquet"]}]}]}
2023-10-15T07:49:52+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of OpenBuddy/openbuddy-openllama-3b-v10-bf16 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model OpenBuddy/openbuddy-openllama-3b-v10-bf16 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 runs. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-15T08:49:40.172924 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
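The load snippet referenced in the summary above is not preserved in this condensed rendering of the card. A minimal sketch, assuming the repository id follows the leaderboard's `details_<org>__<model>` naming convention and using the `harness_winogrande_5` config listed in this record's metadata:

```python
from datasets import load_dataset

# Assumed repository id, inferred from the naming convention of the sibling
# Open LLM Leaderboard detail datasets in this dump; the "train" split points
# at the latest results for the chosen config.
data = load_dataset(
    "open-llm-leaderboard/details_OpenBuddy__openbuddy-openllama-3b-v10-bf16",
    "harness_winogrande_5",
    split="train",
)
```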
[ "# Dataset Card for Evaluation run of OpenBuddy/openbuddy-openllama-3b-v10-bf16", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenBuddy/openbuddy-openllama-3b-v10-bf16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T08:49:40.172924(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of OpenBuddy/openbuddy-openllama-3b-v10-bf16", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenBuddy/openbuddy-openllama-3b-v10-bf16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T08:49:40.172924(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 28, 31, 176, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of OpenBuddy/openbuddy-openllama-3b-v10-bf16## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenBuddy/openbuddy-openllama-3b-v10-bf16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-15T08:49:40.172924(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
319875e981f8b56f5c9a7060d16148de32234403
# Dataset Card for Evaluation run of OpenBuddy/openbuddy-atom-13b-v9-bf16

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/OpenBuddy/openbuddy-atom-13b-v9-bf16
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [OpenBuddy/openbuddy-atom-13b-v9-bf16](https://huggingface.co/OpenBuddy/openbuddy-atom-13b-v9-bf16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 runs. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_OpenBuddy__openbuddy-atom-13b-v9-bf16",
    "harness_winogrande_5",
    split="train")
```

## Latest results

These are the [latest results from run 2023-10-15T21:37:39.062296](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenBuddy__openbuddy-atom-13b-v9-bf16/blob/main/results_2023-10-15T21-37-39.062296.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.057466442953020135,
        "em_stderr": 0.0023833905882384974,
        "f1": 0.11402369966442945,
        "f1_stderr": 0.0026622077831256583,
        "acc": 0.44356628547732635,
        "acc_stderr": 0.011184922703096678
    },
    "harness|drop|3": {
        "em": 0.057466442953020135,
        "em_stderr": 0.0023833905882384974,
        "f1": 0.11402369966442945,
        "f1_stderr": 0.0026622077831256583
    },
    "harness|gsm8k|5": {
        "acc": 0.15390447308567096,
        "acc_stderr": 0.00993979930404902
    },
    "harness|winogrande|5": {
        "acc": 0.7332280978689818,
        "acc_stderr": 0.012430046102144337
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
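The aggregated numbers shown under "Latest results" live in the `results` config; per the metadata below, it exposes one split per run plus a `latest` split. A minimal sketch of loading it (usage assumed from those config and split names):

```python
from datasets import load_dataset

# "results" aggregates all runs of this model; the "latest" split points
# at the most recent run (2023-10-15 here, per the metadata).
results = load_dataset(
    "open-llm-leaderboard/details_OpenBuddy__openbuddy-atom-13b-v9-bf16",
    "results",
    split="latest",
)
```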
open-llm-leaderboard/details_OpenBuddy__openbuddy-atom-13b-v9-bf16
[ "region:us" ]
2023-08-18T17:41:06+00:00
{"pretty_name": "Evaluation run of OpenBuddy/openbuddy-atom-13b-v9-bf16", "dataset_summary": "Dataset automatically created during the evaluation run of model [OpenBuddy/openbuddy-atom-13b-v9-bf16](https://huggingface.co/OpenBuddy/openbuddy-atom-13b-v9-bf16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_OpenBuddy__openbuddy-atom-13b-v9-bf16\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-15T21:37:39.062296](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenBuddy__openbuddy-atom-13b-v9-bf16/blob/main/results_2023-10-15T21-37-39.062296.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.057466442953020135,\n \"em_stderr\": 0.0023833905882384974,\n \"f1\": 0.11402369966442945,\n \"f1_stderr\": 0.0026622077831256583,\n \"acc\": 0.44356628547732635,\n \"acc_stderr\": 0.011184922703096678\n },\n \"harness|drop|3\": {\n \"em\": 0.057466442953020135,\n \"em_stderr\": 0.0023833905882384974,\n \"f1\": 0.11402369966442945,\n \"f1_stderr\": 0.0026622077831256583\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.15390447308567096,\n \"acc_stderr\": 0.00993979930404902\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7332280978689818,\n \"acc_stderr\": 0.012430046102144337\n }\n}\n```", "repo_url": "https://huggingface.co/OpenBuddy/openbuddy-atom-13b-v9-bf16", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|arc:challenge|25_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_15T21_37_39.062296", "path": ["**/details_harness|drop|3_2023-10-15T21-37-39.062296.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-15T21-37-39.062296.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_15T21_37_39.062296", "path": ["**/details_harness|gsm8k|5_2023-10-15T21-37-39.062296.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-15T21-37-39.062296.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hellaswag|10_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:31:32.257089.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:31:32.257089.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:31:32.257089.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T18:31:32.257089.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T18:31:32.257089.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T18:31:32.257089.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_15T21_37_39.062296", "path": ["**/details_harness|winogrande|5_2023-10-15T21-37-39.062296.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-15T21-37-39.062296.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T18_31_32.257089", "path": ["results_2023-08-17T18:31:32.257089.parquet"]}, {"split": "2023_10_15T21_37_39.062296", "path": ["results_2023-10-15T21-37-39.062296.parquet"]}, {"split": "latest", "path": ["results_2023-10-15T21-37-39.062296.parquet"]}]}]}
2023-10-15T20:37:51+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of OpenBuddy/openbuddy-atom-13b-v9-bf16 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model OpenBuddy/openbuddy-atom-13b-v9-bf16 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 runs. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-15T21:37:39.062296 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
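The snippet referenced above is dropped in this condensed rendering; the full card for this same model earlier in the record preserves it:

```python
from datasets import load_dataset

# Loads the per-example winogrande details; "train" points at the latest run.
data = load_dataset(
    "open-llm-leaderboard/details_OpenBuddy__openbuddy-atom-13b-v9-bf16",
    "harness_winogrande_5",
    split="train",
)
```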
[ "# Dataset Card for Evaluation run of OpenBuddy/openbuddy-atom-13b-v9-bf16", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenBuddy/openbuddy-atom-13b-v9-bf16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T21:37:39.062296(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of OpenBuddy/openbuddy-atom-13b-v9-bf16", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenBuddy/openbuddy-atom-13b-v9-bf16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T21:37:39.062296(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 27, 31, 175, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of OpenBuddy/openbuddy-atom-13b-v9-bf16## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model OpenBuddy/openbuddy-atom-13b-v9-bf16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-15T21:37:39.062296(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
820c92a8e91928c5a2bf98127ed33d30540d1121
# Dataset Card for Evaluation run of HWERI/pythia-1.4b-deduped-sharegpt ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/HWERI/pythia-1.4b-deduped-sharegpt - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [HWERI/pythia-1.4b-deduped-sharegpt](https://huggingface.co/HWERI/pythia-1.4b-deduped-sharegpt) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_HWERI__pythia-1.4b-deduped-sharegpt", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-16T20:15:27.580598](https://huggingface.co/datasets/open-llm-leaderboard/details_HWERI__pythia-1.4b-deduped-sharegpt/blob/main/results_2023-09-16T20-15-27.580598.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0010486577181208054, "em_stderr": 0.00033145814652192217, "f1": 0.04875104865771823, "f1_stderr": 0.0012458540332815637, "acc": 0.2804129195481258, "acc_stderr": 0.008239894933698364 }, "harness|drop|3": { "em": 0.0010486577181208054, "em_stderr": 0.00033145814652192217, "f1": 0.04875104865771823, "f1_stderr": 0.0012458540332815637 }, "harness|gsm8k|5": { "acc": 0.008339651250947688, "acc_stderr": 0.002504942226860534 }, "harness|winogrande|5": { "acc": 0.5524861878453039, "acc_stderr": 0.013974847640536194 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
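The snippet in the card loads the details of one task; the aggregated scores live in the separate "results" configuration described above. A minimal sketch of loading them, assuming the "results" configuration and "latest" split declared in this dataset's configs:

```python
from datasets import load_dataset

# The "results" configuration holds the aggregated scores of each run;
# per the configs, the "latest" split points at the most recent run.
results = load_dataset(
    "open-llm-leaderboard/details_HWERI__pythia-1.4b-deduped-sharegpt",
    "results",
    split="latest",
)
```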
open-llm-leaderboard/details_HWERI__pythia-1.4b-deduped-sharegpt
[ "region:us" ]
2023-08-18T17:41:15+00:00
{"pretty_name": "Evaluation run of HWERI/pythia-1.4b-deduped-sharegpt", "dataset_summary": "Dataset automatically created during the evaluation run of model [HWERI/pythia-1.4b-deduped-sharegpt](https://huggingface.co/HWERI/pythia-1.4b-deduped-sharegpt) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_HWERI__pythia-1.4b-deduped-sharegpt\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-16T20:15:27.580598](https://huggingface.co/datasets/open-llm-leaderboard/details_HWERI__pythia-1.4b-deduped-sharegpt/blob/main/results_2023-09-16T20-15-27.580598.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0010486577181208054,\n \"em_stderr\": 0.00033145814652192217,\n \"f1\": 0.04875104865771823,\n \"f1_stderr\": 0.0012458540332815637,\n \"acc\": 0.2804129195481258,\n \"acc_stderr\": 0.008239894933698364\n },\n \"harness|drop|3\": {\n \"em\": 0.0010486577181208054,\n \"em_stderr\": 0.00033145814652192217,\n \"f1\": 0.04875104865771823,\n \"f1_stderr\": 0.0012458540332815637\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.008339651250947688,\n \"acc_stderr\": 0.002504942226860534\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5524861878453039,\n \"acc_stderr\": 0.013974847640536194\n }\n}\n```", "repo_url": "https://huggingface.co/HWERI/pythia-1.4b-deduped-sharegpt", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|arc:challenge|25_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_16T20_15_27.580598", "path": ["**/details_harness|drop|3_2023-09-16T20-15-27.580598.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-16T20-15-27.580598.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_16T20_15_27.580598", "path": ["**/details_harness|gsm8k|5_2023-09-16T20-15-27.580598.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-16T20-15-27.580598.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hellaswag|10_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-17T18:24:42.073512.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:24:42.073512.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:24:42.073512.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T18:24:42.073512.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T18:24:42.073512.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T18:24:42.073512.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_16T20_15_27.580598", "path": ["**/details_harness|winogrande|5_2023-09-16T20-15-27.580598.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-16T20-15-27.580598.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T18_24_42.073512", "path": ["results_2023-08-17T18:24:42.073512.parquet"]}, {"split": "2023_09_16T20_15_27.580598", "path": ["results_2023-09-16T20-15-27.580598.parquet"]}, {"split": "latest", "path": ["results_2023-09-16T20-15-27.580598.parquet"]}]}]}
2023-09-16T19:15:38+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of HWERI/pythia-1.4b-deduped-sharegpt ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model HWERI/pythia-1.4b-deduped-sharegpt on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-16T20:15:27.580598 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
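The processed text above ends its loading paragraph at "do the following:" without the fenced example; the load call from the full card, shown here for reference:

```python
from datasets import load_dataset

# Load the details of one evaluated task (5-shot Winogrande) from the
# latest run, exactly as in the card's own example.
data = load_dataset(
    "open-llm-leaderboard/details_HWERI__pythia-1.4b-deduped-sharegpt",
    "harness_winogrande_5",
    split="train",
)
```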
[ "# Dataset Card for Evaluation run of HWERI/pythia-1.4b-deduped-sharegpt", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model HWERI/pythia-1.4b-deduped-sharegpt on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-16T20:15:27.580598(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of HWERI/pythia-1.4b-deduped-sharegpt", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model HWERI/pythia-1.4b-deduped-sharegpt on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-16T20:15:27.580598(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 26, 31, 174, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of HWERI/pythia-1.4b-deduped-sharegpt## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model HWERI/pythia-1.4b-deduped-sharegpt on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-16T20:15:27.580598(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
512eeabdd49000e09b1aee66f6a72fe95ec8940a
# Dataset Card for Evaluation run of grimpep/llama2-22B-GPLATTY ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/grimpep/llama2-22B-GPLATTY - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [grimpep/llama2-22B-GPLATTY](https://huggingface.co/grimpep/llama2-22B-GPLATTY) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_grimpep__llama2-22B-GPLATTY", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-17T14:31:52.312230](https://huggingface.co/datasets/open-llm-leaderboard/details_grimpep__llama2-22B-GPLATTY/blob/main/results_2023-08-17T14%3A31%3A52.312230.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5471453447112705, "acc_stderr": 0.034452241209601206, "acc_norm": 0.550874294679223, "acc_norm_stderr": 0.03443332656790291, "mc1": 0.31334149326805383, "mc1_stderr": 0.0162380650690596, "mc2": 0.4692973392633332, "mc2_stderr": 0.0156700439246235 }, "harness|arc:challenge|25": { "acc": 0.560580204778157, "acc_stderr": 0.014503747823580123, "acc_norm": 0.5895904436860068, "acc_norm_stderr": 0.014374922192642662 }, "harness|hellaswag|10": { "acc": 0.6290579565823541, "acc_stderr": 0.004820697457420421, "acc_norm": 0.8200557657837084, "acc_norm_stderr": 0.003833559228158675 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.04793724854411022, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411022 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5777777777777777, "acc_stderr": 0.04266763404099582, "acc_norm": 0.5777777777777777, "acc_norm_stderr": 0.04266763404099582 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5723684210526315, "acc_stderr": 0.04026097083296564, "acc_norm": 0.5723684210526315, "acc_norm_stderr": 0.04026097083296564 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5924528301886792, "acc_stderr": 0.03024223380085449, "acc_norm": 0.5924528301886792, "acc_norm_stderr": 0.03024223380085449 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6319444444444444, "acc_stderr": 0.04032999053960718, "acc_norm": 0.6319444444444444, "acc_norm_stderr": 0.04032999053960718 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr":
0.04902071300001975 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.48554913294797686, "acc_stderr": 0.03810871630454764, "acc_norm": 0.48554913294797686, "acc_norm_stderr": 0.03810871630454764 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.042801058373643966, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.042801058373643966 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4425531914893617, "acc_stderr": 0.03246956919789958, "acc_norm": 0.4425531914893617, "acc_norm_stderr": 0.03246956919789958 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.04185774424022056, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022056 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.503448275862069, "acc_stderr": 0.04166567577101579, "acc_norm": 0.503448275862069, "acc_norm_stderr": 0.04166567577101579 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3306878306878307, "acc_stderr": 0.024229965298425082, "acc_norm": 0.3306878306878307, "acc_norm_stderr": 0.024229965298425082 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.31746031746031744, "acc_stderr": 0.04163453031302859, "acc_norm": 0.31746031746031744, "acc_norm_stderr": 0.04163453031302859 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6258064516129033, "acc_stderr": 0.027528904299845697, "acc_norm": 0.6258064516129033, "acc_norm_stderr": 0.027528904299845697 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4433497536945813, "acc_stderr": 0.03495334582162934, "acc_norm": 0.4433497536945813, "acc_norm_stderr": 0.03495334582162934 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6484848484848484, "acc_stderr": 0.037282069986826503, "acc_norm": 0.6484848484848484, "acc_norm_stderr": 0.037282069986826503 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6818181818181818, "acc_stderr": 0.03318477333845331, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.03318477333845331 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7927461139896373, "acc_stderr": 0.02925282329180363, "acc_norm": 0.7927461139896373, "acc_norm_stderr": 0.02925282329180363 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5307692307692308, "acc_stderr": 0.025302958890850154, "acc_norm": 0.5307692307692308, "acc_norm_stderr": 0.025302958890850154 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2740740740740741, "acc_stderr": 0.027195934804085622, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.027195934804085622 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5588235294117647, "acc_stderr": 
0.032252942323996406, "acc_norm": 0.5588235294117647, "acc_norm_stderr": 0.032252942323996406 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 0.03861557546255169, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.03861557546255169 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.728440366972477, "acc_stderr": 0.01906909836319144, "acc_norm": 0.728440366972477, "acc_norm_stderr": 0.01906909836319144 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.41203703703703703, "acc_stderr": 0.03356787758160835, "acc_norm": 0.41203703703703703, "acc_norm_stderr": 0.03356787758160835 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7696078431372549, "acc_stderr": 0.02955429260569507, "acc_norm": 0.7696078431372549, "acc_norm_stderr": 0.02955429260569507 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7383966244725738, "acc_stderr": 0.028609516716994934, "acc_norm": 0.7383966244725738, "acc_norm_stderr": 0.028609516716994934 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6636771300448431, "acc_stderr": 0.031708824268455, "acc_norm": 0.6636771300448431, "acc_norm_stderr": 0.031708824268455 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6030534351145038, "acc_stderr": 0.04291135671009224, "acc_norm": 0.6030534351145038, "acc_norm_stderr": 0.04291135671009224 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7355371900826446, "acc_stderr": 0.04026187527591205, "acc_norm": 0.7355371900826446, "acc_norm_stderr": 0.04026187527591205 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7129629629629629, "acc_stderr": 0.043733130409147614, "acc_norm": 0.7129629629629629, "acc_norm_stderr": 0.043733130409147614 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6748466257668712, "acc_stderr": 0.03680350371286461, "acc_norm": 0.6748466257668712, "acc_norm_stderr": 0.03680350371286461 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.26785714285714285, "acc_stderr": 0.04203277291467762, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.04203277291467762 }, "harness|hendrycksTest-management|5": { "acc": 0.6796116504854369, "acc_stderr": 0.04620284082280041, "acc_norm": 0.6796116504854369, "acc_norm_stderr": 0.04620284082280041 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7649572649572649, "acc_stderr": 0.02777883590493543, "acc_norm": 0.7649572649572649, "acc_norm_stderr": 0.02777883590493543 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7598978288633461, "acc_stderr": 0.015274685213734195, "acc_norm": 0.7598978288633461, "acc_norm_stderr": 0.015274685213734195 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6213872832369942, "acc_stderr": 0.026113749361310345, "acc_norm": 0.6213872832369942, "acc_norm_stderr": 0.026113749361310345 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3564245810055866, "acc_stderr": 0.016018239710513405, "acc_norm": 0.3564245810055866, "acc_norm_stderr": 0.016018239710513405 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6176470588235294, "acc_stderr": 0.027826109307283693, "acc_norm": 0.6176470588235294, "acc_norm_stderr": 0.027826109307283693 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6334405144694534, "acc_stderr": 0.027368078243971646, "acc_norm": 0.6334405144694534, "acc_norm_stderr": 0.027368078243971646 }, 
"harness|hendrycksTest-prehistory|5": { "acc": 0.6327160493827161, "acc_stderr": 0.0268228017595079, "acc_norm": 0.6327160493827161, "acc_norm_stderr": 0.0268228017595079 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4148936170212766, "acc_stderr": 0.029392236584612493, "acc_norm": 0.4148936170212766, "acc_norm_stderr": 0.029392236584612493 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.39960886571056065, "acc_stderr": 0.012510181636960672, "acc_norm": 0.39960886571056065, "acc_norm_stderr": 0.012510181636960672 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5, "acc_stderr": 0.030372836961539352, "acc_norm": 0.5, "acc_norm_stderr": 0.030372836961539352 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5522875816993464, "acc_stderr": 0.020116925347422425, "acc_norm": 0.5522875816993464, "acc_norm_stderr": 0.020116925347422425 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6181818181818182, "acc_stderr": 0.046534298079135075, "acc_norm": 0.6181818181818182, "acc_norm_stderr": 0.046534298079135075 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6448979591836734, "acc_stderr": 0.030635655150387634, "acc_norm": 0.6448979591836734, "acc_norm_stderr": 0.030635655150387634 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6069651741293532, "acc_stderr": 0.0345368246603156, "acc_norm": 0.6069651741293532, "acc_norm_stderr": 0.0345368246603156 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.83, "acc_stderr": 0.03775251680686371, "acc_norm": 0.83, "acc_norm_stderr": 0.03775251680686371 }, "harness|hendrycksTest-virology|5": { "acc": 0.4457831325301205, "acc_stderr": 0.03869543323472101, "acc_norm": 0.4457831325301205, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7719298245614035, "acc_stderr": 0.032180937956023566, "acc_norm": 0.7719298245614035, "acc_norm_stderr": 0.032180937956023566 }, "harness|truthfulqa:mc|0": { "mc1": 0.31334149326805383, "mc1_stderr": 0.0162380650690596, "mc2": 0.4692973392633332, "mc2_stderr": 0.0156700439246235 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_grimpep__llama2-22B-GPLATTY
[ "region:us" ]
2023-08-18T17:41:24+00:00
{"pretty_name": "Evaluation run of grimpep/llama2-22B-GPLATTY", "dataset_summary": "Dataset automatically created during the evaluation run of model [grimpep/llama2-22B-GPLATTY](https://huggingface.co/grimpep/llama2-22B-GPLATTY) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_grimpep__llama2-22B-GPLATTY\",\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-08-17T14:31:52.312230](https://huggingface.co/datasets/open-llm-leaderboard/details_grimpep__llama2-22B-GPLATTY/blob/main/results_2023-08-17T14%3A31%3A52.312230.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5471453447112705,\n \"acc_stderr\": 0.034452241209601206,\n \"acc_norm\": 0.550874294679223,\n \"acc_norm_stderr\": 0.03443332656790291,\n \"mc1\": 0.31334149326805383,\n \"mc1_stderr\": 0.0162380650690596,\n \"mc2\": 0.4692973392633332,\n \"mc2_stderr\": 0.0156700439246235\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.560580204778157,\n \"acc_stderr\": 0.014503747823580123,\n \"acc_norm\": 0.5895904436860068,\n \"acc_norm_stderr\": 0.014374922192642662\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6290579565823541,\n \"acc_stderr\": 0.004820697457420421,\n \"acc_norm\": 0.8200557657837084,\n \"acc_norm_stderr\": 0.003833559228158675\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411022,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411022\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5777777777777777,\n \"acc_stderr\": 0.04266763404099582,\n \"acc_norm\": 0.5777777777777777,\n \"acc_norm_stderr\": 0.04266763404099582\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5723684210526315,\n \"acc_stderr\": 0.04026097083296564,\n \"acc_norm\": 0.5723684210526315,\n \"acc_norm_stderr\": 0.04026097083296564\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5924528301886792,\n \"acc_stderr\": 0.03024223380085449,\n \"acc_norm\": 0.5924528301886792,\n \"acc_norm_stderr\": 0.03024223380085449\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6319444444444444,\n \"acc_stderr\": 0.04032999053960718,\n \"acc_norm\": 0.6319444444444444,\n \"acc_norm_stderr\": 0.04032999053960718\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n 
\"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.48554913294797686,\n \"acc_stderr\": 0.03810871630454764,\n \"acc_norm\": 0.48554913294797686,\n \"acc_norm_stderr\": 0.03810871630454764\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.042801058373643966,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.042801058373643966\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4425531914893617,\n \"acc_stderr\": 0.03246956919789958,\n \"acc_norm\": 0.4425531914893617,\n \"acc_norm_stderr\": 0.03246956919789958\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2719298245614035,\n \"acc_stderr\": 0.04185774424022056,\n \"acc_norm\": 0.2719298245614035,\n \"acc_norm_stderr\": 0.04185774424022056\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.503448275862069,\n \"acc_stderr\": 0.04166567577101579,\n \"acc_norm\": 0.503448275862069,\n \"acc_norm_stderr\": 0.04166567577101579\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3306878306878307,\n \"acc_stderr\": 0.024229965298425082,\n \"acc_norm\": 0.3306878306878307,\n \"acc_norm_stderr\": 0.024229965298425082\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.31746031746031744,\n \"acc_stderr\": 0.04163453031302859,\n \"acc_norm\": 0.31746031746031744,\n \"acc_norm_stderr\": 0.04163453031302859\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6258064516129033,\n \"acc_stderr\": 0.027528904299845697,\n \"acc_norm\": 0.6258064516129033,\n \"acc_norm_stderr\": 0.027528904299845697\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4433497536945813,\n \"acc_stderr\": 0.03495334582162934,\n \"acc_norm\": 0.4433497536945813,\n \"acc_norm_stderr\": 0.03495334582162934\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6484848484848484,\n \"acc_stderr\": 0.037282069986826503,\n \"acc_norm\": 0.6484848484848484,\n \"acc_norm_stderr\": 0.037282069986826503\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.03318477333845331,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.03318477333845331\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7927461139896373,\n \"acc_stderr\": 0.02925282329180363,\n \"acc_norm\": 0.7927461139896373,\n \"acc_norm_stderr\": 0.02925282329180363\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5307692307692308,\n 
\"acc_stderr\": 0.025302958890850154,\n \"acc_norm\": 0.5307692307692308,\n \"acc_norm_stderr\": 0.025302958890850154\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2740740740740741,\n \"acc_stderr\": 0.027195934804085622,\n \"acc_norm\": 0.2740740740740741,\n \"acc_norm_stderr\": 0.027195934804085622\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5588235294117647,\n \"acc_stderr\": 0.032252942323996406,\n \"acc_norm\": 0.5588235294117647,\n \"acc_norm_stderr\": 0.032252942323996406\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.03861557546255169,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.03861557546255169\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.728440366972477,\n \"acc_stderr\": 0.01906909836319144,\n \"acc_norm\": 0.728440366972477,\n \"acc_norm_stderr\": 0.01906909836319144\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.41203703703703703,\n \"acc_stderr\": 0.03356787758160835,\n \"acc_norm\": 0.41203703703703703,\n \"acc_norm_stderr\": 0.03356787758160835\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7696078431372549,\n \"acc_stderr\": 0.02955429260569507,\n \"acc_norm\": 0.7696078431372549,\n \"acc_norm_stderr\": 0.02955429260569507\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7383966244725738,\n \"acc_stderr\": 0.028609516716994934,\n \"acc_norm\": 0.7383966244725738,\n \"acc_norm_stderr\": 0.028609516716994934\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6636771300448431,\n \"acc_stderr\": 0.031708824268455,\n \"acc_norm\": 0.6636771300448431,\n \"acc_norm_stderr\": 0.031708824268455\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6030534351145038,\n \"acc_stderr\": 0.04291135671009224,\n \"acc_norm\": 0.6030534351145038,\n \"acc_norm_stderr\": 0.04291135671009224\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7355371900826446,\n \"acc_stderr\": 0.04026187527591205,\n \"acc_norm\": 0.7355371900826446,\n \"acc_norm_stderr\": 0.04026187527591205\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7129629629629629,\n \"acc_stderr\": 0.043733130409147614,\n \"acc_norm\": 0.7129629629629629,\n \"acc_norm_stderr\": 0.043733130409147614\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6748466257668712,\n \"acc_stderr\": 0.03680350371286461,\n \"acc_norm\": 0.6748466257668712,\n \"acc_norm_stderr\": 0.03680350371286461\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.26785714285714285,\n \"acc_stderr\": 0.04203277291467762,\n \"acc_norm\": 0.26785714285714285,\n \"acc_norm_stderr\": 0.04203277291467762\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6796116504854369,\n \"acc_stderr\": 0.04620284082280041,\n \"acc_norm\": 0.6796116504854369,\n \"acc_norm_stderr\": 0.04620284082280041\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7649572649572649,\n \"acc_stderr\": 0.02777883590493543,\n \"acc_norm\": 0.7649572649572649,\n \"acc_norm_stderr\": 0.02777883590493543\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7598978288633461,\n \"acc_stderr\": 0.015274685213734195,\n \"acc_norm\": 0.7598978288633461,\n 
\"acc_norm_stderr\": 0.015274685213734195\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6213872832369942,\n \"acc_stderr\": 0.026113749361310345,\n \"acc_norm\": 0.6213872832369942,\n \"acc_norm_stderr\": 0.026113749361310345\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3564245810055866,\n \"acc_stderr\": 0.016018239710513405,\n \"acc_norm\": 0.3564245810055866,\n \"acc_norm_stderr\": 0.016018239710513405\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6176470588235294,\n \"acc_stderr\": 0.027826109307283693,\n \"acc_norm\": 0.6176470588235294,\n \"acc_norm_stderr\": 0.027826109307283693\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6334405144694534,\n \"acc_stderr\": 0.027368078243971646,\n \"acc_norm\": 0.6334405144694534,\n \"acc_norm_stderr\": 0.027368078243971646\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6327160493827161,\n \"acc_stderr\": 0.0268228017595079,\n \"acc_norm\": 0.6327160493827161,\n \"acc_norm_stderr\": 0.0268228017595079\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4148936170212766,\n \"acc_stderr\": 0.029392236584612493,\n \"acc_norm\": 0.4148936170212766,\n \"acc_norm_stderr\": 0.029392236584612493\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.39960886571056065,\n \"acc_stderr\": 0.012510181636960672,\n \"acc_norm\": 0.39960886571056065,\n \"acc_norm_stderr\": 0.012510181636960672\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.030372836961539352,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.030372836961539352\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5522875816993464,\n \"acc_stderr\": 0.020116925347422425,\n \"acc_norm\": 0.5522875816993464,\n \"acc_norm_stderr\": 0.020116925347422425\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6181818181818182,\n \"acc_stderr\": 0.046534298079135075,\n \"acc_norm\": 0.6181818181818182,\n \"acc_norm_stderr\": 0.046534298079135075\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6448979591836734,\n \"acc_stderr\": 0.030635655150387634,\n \"acc_norm\": 0.6448979591836734,\n \"acc_norm_stderr\": 0.030635655150387634\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6069651741293532,\n \"acc_stderr\": 0.0345368246603156,\n \"acc_norm\": 0.6069651741293532,\n \"acc_norm_stderr\": 0.0345368246603156\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.83,\n \"acc_stderr\": 0.03775251680686371,\n \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.03775251680686371\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4457831325301205,\n \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.4457831325301205,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7719298245614035,\n \"acc_stderr\": 0.032180937956023566,\n \"acc_norm\": 0.7719298245614035,\n \"acc_norm_stderr\": 0.032180937956023566\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.31334149326805383,\n \"mc1_stderr\": 0.0162380650690596,\n \"mc2\": 0.4692973392633332,\n \"mc2_stderr\": 0.0156700439246235\n }\n}\n```", "repo_url": "https://huggingface.co/grimpep/llama2-22B-GPLATTY", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": 
["**/details_harness|arc:challenge|25_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hellaswag|10_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:31:52.312230.parquet", 
"**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:31:52.312230.parquet", 
"**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:31:52.312230.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T14:31:52.312230.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T14:31:52.312230.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T14_31_52.312230", "path": ["results_2023-08-17T14:31:52.312230.parquet"]}, {"split": "latest", "path": ["results_2023-08-17T14:31:52.312230.parquet"]}]}]}
2023-08-27T11:40:16+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of grimpep/llama2-22B-GPLATTY ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model grimpep/llama2-22B-GPLATTY on the Open LLM Leaderboard. The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-08-17T14:31:52.312230 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
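The loading snippet that the summary above refers to is given in this record's metadata; reproduced here for convenience:

```python
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_grimpep__llama2-22B-GPLATTY",
    "harness_truthfulqa_mc_0",
    split="train",
)
```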
[ "# Dataset Card for Evaluation run of grimpep/llama2-22B-GPLATTY", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model grimpep/llama2-22B-GPLATTY on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-17T14:31:52.312230 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of grimpep/llama2-22B-GPLATTY", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model grimpep/llama2-22B-GPLATTY on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-17T14:31:52.312230 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 170, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of grimpep/llama2-22B-GPLATTY## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model grimpep/llama2-22B-GPLATTY on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-08-17T14:31:52.312230 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
7ecb38b856fdb5edce0a95033464bac42d6a4285
# Dataset Card for Evaluation run of grimpep/llama2-22b-wizard_vicuna ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/grimpep/llama2-22b-wizard_vicuna - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [grimpep/llama2-22b-wizard_vicuna](https://huggingface.co/grimpep/llama2-22b-wizard_vicuna) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_grimpep__llama2-22b-wizard_vicuna", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-17T14:12:20.144901](https://huggingface.co/datasets/open-llm-leaderboard/details_grimpep__llama2-22b-wizard_vicuna/blob/main/results_2023-08-17T14%3A12%3A20.144901.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5471453447112705, "acc_stderr": 0.034452241209601206, "acc_norm": 0.550874294679223, "acc_norm_stderr": 0.03443332656790291, "mc1": 0.31334149326805383, "mc1_stderr": 0.0162380650690596, "mc2": 0.4692973392633332, "mc2_stderr": 0.0156700439246235 }, "harness|arc:challenge|25": { "acc": 0.560580204778157, "acc_stderr": 0.014503747823580123, "acc_norm": 0.5895904436860068, "acc_norm_stderr": 0.014374922192642662 }, "harness|hellaswag|10": { "acc": 0.6290579565823541, "acc_stderr": 0.004820697457420421, "acc_norm": 0.8200557657837084, "acc_norm_stderr": 0.003833559228158675 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.04793724854411022, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411022 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5777777777777777, "acc_stderr": 0.04266763404099582, "acc_norm": 0.5777777777777777, "acc_norm_stderr": 0.04266763404099582 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5723684210526315, "acc_stderr": 0.04026097083296564, "acc_norm": 0.5723684210526315, "acc_norm_stderr": 0.04026097083296564 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5924528301886792, "acc_stderr": 0.03024223380085449, "acc_norm": 0.5924528301886792, "acc_norm_stderr": 0.03024223380085449 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6319444444444444, "acc_stderr": 0.04032999053960718, "acc_norm": 0.6319444444444444, "acc_norm_stderr": 0.04032999053960718 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 
0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.48554913294797686, "acc_stderr": 0.03810871630454764, "acc_norm": 0.48554913294797686, "acc_norm_stderr": 0.03810871630454764 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.042801058373643966, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.042801058373643966 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4425531914893617, "acc_stderr": 0.03246956919789958, "acc_norm": 0.4425531914893617, "acc_norm_stderr": 0.03246956919789958 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.04185774424022056, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022056 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.503448275862069, "acc_stderr": 0.04166567577101579, "acc_norm": 0.503448275862069, "acc_norm_stderr": 0.04166567577101579 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3306878306878307, "acc_stderr": 0.024229965298425082, "acc_norm": 0.3306878306878307, "acc_norm_stderr": 0.024229965298425082 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.31746031746031744, "acc_stderr": 0.04163453031302859, "acc_norm": 0.31746031746031744, "acc_norm_stderr": 0.04163453031302859 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6258064516129033, "acc_stderr": 0.027528904299845697, "acc_norm": 0.6258064516129033, "acc_norm_stderr": 0.027528904299845697 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4433497536945813, "acc_stderr": 0.03495334582162934, "acc_norm": 0.4433497536945813, "acc_norm_stderr": 0.03495334582162934 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6484848484848484, "acc_stderr": 0.037282069986826503, "acc_norm": 0.6484848484848484, "acc_norm_stderr": 0.037282069986826503 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6818181818181818, "acc_stderr": 0.03318477333845331, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.03318477333845331 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7927461139896373, "acc_stderr": 0.02925282329180363, "acc_norm": 0.7927461139896373, "acc_norm_stderr": 0.02925282329180363 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5307692307692308, "acc_stderr": 0.025302958890850154, "acc_norm": 0.5307692307692308, "acc_norm_stderr": 0.025302958890850154 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2740740740740741, "acc_stderr": 0.027195934804085622, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.027195934804085622 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5588235294117647, 
"acc_stderr": 0.032252942323996406, "acc_norm": 0.5588235294117647, "acc_norm_stderr": 0.032252942323996406 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 0.03861557546255169, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.03861557546255169 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.728440366972477, "acc_stderr": 0.01906909836319144, "acc_norm": 0.728440366972477, "acc_norm_stderr": 0.01906909836319144 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.41203703703703703, "acc_stderr": 0.03356787758160835, "acc_norm": 0.41203703703703703, "acc_norm_stderr": 0.03356787758160835 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7696078431372549, "acc_stderr": 0.02955429260569507, "acc_norm": 0.7696078431372549, "acc_norm_stderr": 0.02955429260569507 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7383966244725738, "acc_stderr": 0.028609516716994934, "acc_norm": 0.7383966244725738, "acc_norm_stderr": 0.028609516716994934 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6636771300448431, "acc_stderr": 0.031708824268455, "acc_norm": 0.6636771300448431, "acc_norm_stderr": 0.031708824268455 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6030534351145038, "acc_stderr": 0.04291135671009224, "acc_norm": 0.6030534351145038, "acc_norm_stderr": 0.04291135671009224 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7355371900826446, "acc_stderr": 0.04026187527591205, "acc_norm": 0.7355371900826446, "acc_norm_stderr": 0.04026187527591205 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7129629629629629, "acc_stderr": 0.043733130409147614, "acc_norm": 0.7129629629629629, "acc_norm_stderr": 0.043733130409147614 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6748466257668712, "acc_stderr": 0.03680350371286461, "acc_norm": 0.6748466257668712, "acc_norm_stderr": 0.03680350371286461 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.26785714285714285, "acc_stderr": 0.04203277291467762, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.04203277291467762 }, "harness|hendrycksTest-management|5": { "acc": 0.6796116504854369, "acc_stderr": 0.04620284082280041, "acc_norm": 0.6796116504854369, "acc_norm_stderr": 0.04620284082280041 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7649572649572649, "acc_stderr": 0.02777883590493543, "acc_norm": 0.7649572649572649, "acc_norm_stderr": 0.02777883590493543 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7598978288633461, "acc_stderr": 0.015274685213734195, "acc_norm": 0.7598978288633461, "acc_norm_stderr": 0.015274685213734195 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6213872832369942, "acc_stderr": 0.026113749361310345, "acc_norm": 0.6213872832369942, "acc_norm_stderr": 0.026113749361310345 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3564245810055866, "acc_stderr": 0.016018239710513405, "acc_norm": 0.3564245810055866, "acc_norm_stderr": 0.016018239710513405 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6176470588235294, "acc_stderr": 0.027826109307283693, "acc_norm": 0.6176470588235294, "acc_norm_stderr": 0.027826109307283693 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6334405144694534, "acc_stderr": 0.027368078243971646, "acc_norm": 0.6334405144694534, "acc_norm_stderr": 0.027368078243971646 }, 
"harness|hendrycksTest-prehistory|5": { "acc": 0.6327160493827161, "acc_stderr": 0.0268228017595079, "acc_norm": 0.6327160493827161, "acc_norm_stderr": 0.0268228017595079 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4148936170212766, "acc_stderr": 0.029392236584612493, "acc_norm": 0.4148936170212766, "acc_norm_stderr": 0.029392236584612493 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.39960886571056065, "acc_stderr": 0.012510181636960672, "acc_norm": 0.39960886571056065, "acc_norm_stderr": 0.012510181636960672 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5, "acc_stderr": 0.030372836961539352, "acc_norm": 0.5, "acc_norm_stderr": 0.030372836961539352 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5522875816993464, "acc_stderr": 0.020116925347422425, "acc_norm": 0.5522875816993464, "acc_norm_stderr": 0.020116925347422425 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6181818181818182, "acc_stderr": 0.046534298079135075, "acc_norm": 0.6181818181818182, "acc_norm_stderr": 0.046534298079135075 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6448979591836734, "acc_stderr": 0.030635655150387634, "acc_norm": 0.6448979591836734, "acc_norm_stderr": 0.030635655150387634 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6069651741293532, "acc_stderr": 0.0345368246603156, "acc_norm": 0.6069651741293532, "acc_norm_stderr": 0.0345368246603156 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.83, "acc_stderr": 0.03775251680686371, "acc_norm": 0.83, "acc_norm_stderr": 0.03775251680686371 }, "harness|hendrycksTest-virology|5": { "acc": 0.4457831325301205, "acc_stderr": 0.03869543323472101, "acc_norm": 0.4457831325301205, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7719298245614035, "acc_stderr": 0.032180937956023566, "acc_norm": 0.7719298245614035, "acc_norm_stderr": 0.032180937956023566 }, "harness|truthfulqa:mc|0": { "mc1": 0.31334149326805383, "mc1_stderr": 0.0162380650690596, "mc2": 0.4692973392633332, "mc2_stderr": 0.0156700439246235 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_grimpep__llama2-22b-wizard_vicuna
[ "region:us" ]
2023-08-18T17:41:32+00:00
{"pretty_name": "Evaluation run of grimpep/llama2-22b-wizard_vicuna", "dataset_summary": "Dataset automatically created during the evaluation run of model [grimpep/llama2-22b-wizard_vicuna](https://huggingface.co/grimpep/llama2-22b-wizard_vicuna) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_grimpep__llama2-22b-wizard_vicuna\",\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-08-17T14:12:20.144901](https://huggingface.co/datasets/open-llm-leaderboard/details_grimpep__llama2-22b-wizard_vicuna/blob/main/results_2023-08-17T14%3A12%3A20.144901.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5471453447112705,\n \"acc_stderr\": 0.034452241209601206,\n \"acc_norm\": 0.550874294679223,\n \"acc_norm_stderr\": 0.03443332656790291,\n \"mc1\": 0.31334149326805383,\n \"mc1_stderr\": 0.0162380650690596,\n \"mc2\": 0.4692973392633332,\n \"mc2_stderr\": 0.0156700439246235\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.560580204778157,\n \"acc_stderr\": 0.014503747823580123,\n \"acc_norm\": 0.5895904436860068,\n \"acc_norm_stderr\": 0.014374922192642662\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6290579565823541,\n \"acc_stderr\": 0.004820697457420421,\n \"acc_norm\": 0.8200557657837084,\n \"acc_norm_stderr\": 0.003833559228158675\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411022,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411022\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5777777777777777,\n \"acc_stderr\": 0.04266763404099582,\n \"acc_norm\": 0.5777777777777777,\n \"acc_norm_stderr\": 0.04266763404099582\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5723684210526315,\n \"acc_stderr\": 0.04026097083296564,\n \"acc_norm\": 0.5723684210526315,\n \"acc_norm_stderr\": 0.04026097083296564\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5924528301886792,\n \"acc_stderr\": 0.03024223380085449,\n \"acc_norm\": 0.5924528301886792,\n \"acc_norm_stderr\": 0.03024223380085449\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6319444444444444,\n \"acc_stderr\": 0.04032999053960718,\n \"acc_norm\": 0.6319444444444444,\n \"acc_norm_stderr\": 0.04032999053960718\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 
0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.48554913294797686,\n \"acc_stderr\": 0.03810871630454764,\n \"acc_norm\": 0.48554913294797686,\n \"acc_norm_stderr\": 0.03810871630454764\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.042801058373643966,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.042801058373643966\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4425531914893617,\n \"acc_stderr\": 0.03246956919789958,\n \"acc_norm\": 0.4425531914893617,\n \"acc_norm_stderr\": 0.03246956919789958\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2719298245614035,\n \"acc_stderr\": 0.04185774424022056,\n \"acc_norm\": 0.2719298245614035,\n \"acc_norm_stderr\": 0.04185774424022056\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.503448275862069,\n \"acc_stderr\": 0.04166567577101579,\n \"acc_norm\": 0.503448275862069,\n \"acc_norm_stderr\": 0.04166567577101579\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3306878306878307,\n \"acc_stderr\": 0.024229965298425082,\n \"acc_norm\": 0.3306878306878307,\n \"acc_norm_stderr\": 0.024229965298425082\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.31746031746031744,\n \"acc_stderr\": 0.04163453031302859,\n \"acc_norm\": 0.31746031746031744,\n \"acc_norm_stderr\": 0.04163453031302859\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6258064516129033,\n \"acc_stderr\": 0.027528904299845697,\n \"acc_norm\": 0.6258064516129033,\n \"acc_norm_stderr\": 0.027528904299845697\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4433497536945813,\n \"acc_stderr\": 0.03495334582162934,\n \"acc_norm\": 0.4433497536945813,\n \"acc_norm_stderr\": 0.03495334582162934\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6484848484848484,\n \"acc_stderr\": 0.037282069986826503,\n \"acc_norm\": 0.6484848484848484,\n \"acc_norm_stderr\": 0.037282069986826503\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.03318477333845331,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.03318477333845331\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7927461139896373,\n \"acc_stderr\": 0.02925282329180363,\n \"acc_norm\": 0.7927461139896373,\n \"acc_norm_stderr\": 0.02925282329180363\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5307692307692308,\n \"acc_stderr\": 0.025302958890850154,\n \"acc_norm\": 0.5307692307692308,\n \"acc_norm_stderr\": 0.025302958890850154\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2740740740740741,\n \"acc_stderr\": 0.027195934804085622,\n \"acc_norm\": 0.2740740740740741,\n \"acc_norm_stderr\": 0.027195934804085622\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5588235294117647,\n \"acc_stderr\": 0.032252942323996406,\n \"acc_norm\": 0.5588235294117647,\n \"acc_norm_stderr\": 0.032252942323996406\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.03861557546255169,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.03861557546255169\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.728440366972477,\n \"acc_stderr\": 0.01906909836319144,\n \"acc_norm\": 0.728440366972477,\n \"acc_norm_stderr\": 0.01906909836319144\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.41203703703703703,\n \"acc_stderr\": 0.03356787758160835,\n \"acc_norm\": 0.41203703703703703,\n \"acc_norm_stderr\": 0.03356787758160835\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7696078431372549,\n \"acc_stderr\": 0.02955429260569507,\n \"acc_norm\": 0.7696078431372549,\n \"acc_norm_stderr\": 0.02955429260569507\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7383966244725738,\n \"acc_stderr\": 0.028609516716994934,\n \"acc_norm\": 0.7383966244725738,\n \"acc_norm_stderr\": 0.028609516716994934\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6636771300448431,\n \"acc_stderr\": 0.031708824268455,\n \"acc_norm\": 0.6636771300448431,\n \"acc_norm_stderr\": 0.031708824268455\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6030534351145038,\n \"acc_stderr\": 0.04291135671009224,\n \"acc_norm\": 0.6030534351145038,\n \"acc_norm_stderr\": 0.04291135671009224\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7355371900826446,\n \"acc_stderr\": 0.04026187527591205,\n \"acc_norm\": 0.7355371900826446,\n \"acc_norm_stderr\": 0.04026187527591205\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7129629629629629,\n \"acc_stderr\": 0.043733130409147614,\n \"acc_norm\": 0.7129629629629629,\n \"acc_norm_stderr\": 0.043733130409147614\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6748466257668712,\n \"acc_stderr\": 0.03680350371286461,\n \"acc_norm\": 0.6748466257668712,\n \"acc_norm_stderr\": 0.03680350371286461\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.26785714285714285,\n \"acc_stderr\": 0.04203277291467762,\n \"acc_norm\": 0.26785714285714285,\n \"acc_norm_stderr\": 0.04203277291467762\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6796116504854369,\n \"acc_stderr\": 0.04620284082280041,\n \"acc_norm\": 0.6796116504854369,\n \"acc_norm_stderr\": 0.04620284082280041\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7649572649572649,\n \"acc_stderr\": 0.02777883590493543,\n \"acc_norm\": 0.7649572649572649,\n \"acc_norm_stderr\": 0.02777883590493543\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.7598978288633461,\n \"acc_stderr\": 0.015274685213734195,\n \"acc_norm\": 0.7598978288633461,\n \"acc_norm_stderr\": 0.015274685213734195\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6213872832369942,\n \"acc_stderr\": 0.026113749361310345,\n \"acc_norm\": 0.6213872832369942,\n \"acc_norm_stderr\": 0.026113749361310345\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3564245810055866,\n \"acc_stderr\": 0.016018239710513405,\n \"acc_norm\": 0.3564245810055866,\n \"acc_norm_stderr\": 0.016018239710513405\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6176470588235294,\n \"acc_stderr\": 0.027826109307283693,\n \"acc_norm\": 0.6176470588235294,\n \"acc_norm_stderr\": 0.027826109307283693\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6334405144694534,\n \"acc_stderr\": 0.027368078243971646,\n \"acc_norm\": 0.6334405144694534,\n \"acc_norm_stderr\": 0.027368078243971646\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6327160493827161,\n \"acc_stderr\": 0.0268228017595079,\n \"acc_norm\": 0.6327160493827161,\n \"acc_norm_stderr\": 0.0268228017595079\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4148936170212766,\n \"acc_stderr\": 0.029392236584612493,\n \"acc_norm\": 0.4148936170212766,\n \"acc_norm_stderr\": 0.029392236584612493\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.39960886571056065,\n \"acc_stderr\": 0.012510181636960672,\n \"acc_norm\": 0.39960886571056065,\n \"acc_norm_stderr\": 0.012510181636960672\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.030372836961539352,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.030372836961539352\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5522875816993464,\n \"acc_stderr\": 0.020116925347422425,\n \"acc_norm\": 0.5522875816993464,\n \"acc_norm_stderr\": 0.020116925347422425\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6181818181818182,\n \"acc_stderr\": 0.046534298079135075,\n \"acc_norm\": 0.6181818181818182,\n \"acc_norm_stderr\": 0.046534298079135075\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6448979591836734,\n \"acc_stderr\": 0.030635655150387634,\n \"acc_norm\": 0.6448979591836734,\n \"acc_norm_stderr\": 0.030635655150387634\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6069651741293532,\n \"acc_stderr\": 0.0345368246603156,\n \"acc_norm\": 0.6069651741293532,\n \"acc_norm_stderr\": 0.0345368246603156\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.83,\n \"acc_stderr\": 0.03775251680686371,\n \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.03775251680686371\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4457831325301205,\n \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.4457831325301205,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7719298245614035,\n \"acc_stderr\": 0.032180937956023566,\n \"acc_norm\": 0.7719298245614035,\n \"acc_norm_stderr\": 0.032180937956023566\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.31334149326805383,\n \"mc1_stderr\": 0.0162380650690596,\n \"mc2\": 0.4692973392633332,\n \"mc2_stderr\": 0.0156700439246235\n }\n}\n```", "repo_url": "https://huggingface.co/grimpep/llama2-22b-wizard_vicuna", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": 
[{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|arc:challenge|25_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hellaswag|10_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:12:20.144901.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:12:20.144901.parquet", 
"**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:12:20.144901.parquet", 
"**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T14:12:20.144901.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T14:12:20.144901.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T14_12_20.144901", "path": ["results_2023-08-17T14:12:20.144901.parquet"]}, {"split": "latest", "path": ["results_2023-08-17T14:12:20.144901.parquet"]}]}]}
2023-08-27T11:40:18+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of grimpep/llama2-22b-wizard_vicuna ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model grimpep/llama2-22b-wizard_vicuna on the Open LLM Leaderboard. The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-08-17T14:12:20.144901 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of grimpep/llama2-22b-wizard_vicuna", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model grimpep/llama2-22b-wizard_vicuna on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-17T14:12:20.144901 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of grimpep/llama2-22b-wizard_vicuna", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model grimpep/llama2-22b-wizard_vicuna on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-17T14:12:20.144901 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 24, 31, 172, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of grimpep/llama2-22b-wizard_vicuna## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model grimpep/llama2-22b-wizard_vicuna on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-08-17T14:12:20.144901 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
8153c693631c381fab1719184e09532637a00947
# Dataset Card for Evaluation run of Enno-Ai/ennodata-7b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Enno-Ai/ennodata-7b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [Enno-Ai/ennodata-7b](https://huggingface.co/Enno-Ai/ennodata-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Enno-Ai__ennodata-7b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-27T05:41:02.798297](https://huggingface.co/datasets/open-llm-leaderboard/details_Enno-Ai__ennodata-7b/blob/main/results_2023-10-27T05-41-02.798297.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and in the "latest" split for each eval): ```python { "all": { "em": 0.0010486577181208054, "em_stderr": 0.0003314581465219155, "f1": 0.055922818791946494, "f1_stderr": 0.0012829613643597505, "acc": 0.3733497369811503, "acc_stderr": 0.008984164865569185 }, "harness|drop|3": { "em": 0.0010486577181208054, "em_stderr": 0.0003314581465219155, "f1": 0.055922818791946494, "f1_stderr": 0.0012829613643597505 }, "harness|gsm8k|5": { "acc": 0.037149355572403335, "acc_stderr": 0.005209516283073758 }, "harness|winogrande|5": { "acc": 0.7095501183898973, "acc_stderr": 0.01275881344806461 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
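The snippet in the card loads the `train` split; each configuration also exposes one split per evaluation run, named after the run's timestamp, plus a `latest` alias. A short sketch using only names confirmed by this card's metadata (for `harness_winogrande_5`, both the timestamped 2023-10-27 split and `latest` point at the same parquet file):

```python
from datasets import load_dataset

repo = "open-llm-leaderboard/details_Enno-Ai__ennodata-7b"

# "latest" is an alias for the newest run of this task; the timestamped
# split "2023_10_27T05_41_02.798297" pins that same run explicitly.
latest = load_dataset(repo, "harness_winogrande_5", split="latest")
print(latest.num_rows)
```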
open-llm-leaderboard/details_Enno-Ai__ennodata-7b
[ "region:us" ]
2023-08-18T17:41:41+00:00
{"pretty_name": "Evaluation run of Enno-Ai/ennodata-7b", "dataset_summary": "Dataset automatically created during the evaluation run of model [Enno-Ai/ennodata-7b](https://huggingface.co/Enno-Ai/ennodata-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Enno-Ai__ennodata-7b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-27T05:41:02.798297](https://huggingface.co/datasets/open-llm-leaderboard/details_Enno-Ai__ennodata-7b/blob/main/results_2023-10-27T05-41-02.798297.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0010486577181208054,\n \"em_stderr\": 0.0003314581465219155,\n \"f1\": 0.055922818791946494,\n \"f1_stderr\": 0.0012829613643597505,\n \"acc\": 0.3733497369811503,\n \"acc_stderr\": 0.008984164865569185\n },\n \"harness|drop|3\": {\n \"em\": 0.0010486577181208054,\n \"em_stderr\": 0.0003314581465219155,\n \"f1\": 0.055922818791946494,\n \"f1_stderr\": 0.0012829613643597505\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.037149355572403335,\n \"acc_stderr\": 0.005209516283073758\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7095501183898973,\n \"acc_stderr\": 0.01275881344806461\n }\n}\n```", "repo_url": "https://huggingface.co/Enno-Ai/ennodata-7b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|arc:challenge|25_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_27T05_41_02.798297", "path": ["**/details_harness|drop|3_2023-10-27T05-41-02.798297.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-27T05-41-02.798297.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_27T05_41_02.798297", "path": ["**/details_harness|gsm8k|5_2023-10-27T05-41-02.798297.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-27T05-41-02.798297.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hellaswag|10_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": 
"2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:21:05.699051.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:21:05.699051.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T18:21:05.699051.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T18:21:05.699051.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T18:21:05.699051.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_27T05_41_02.798297", "path": ["**/details_harness|winogrande|5_2023-10-27T05-41-02.798297.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-27T05-41-02.798297.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T18_21_05.699051", "path": ["results_2023-08-17T18:21:05.699051.parquet"]}, {"split": "2023_10_27T05_41_02.798297", "path": ["results_2023-10-27T05-41-02.798297.parquet"]}, {"split": "latest", "path": ["results_2023-10-27T05-41-02.798297.parquet"]}]}]}
2023-10-27T04:41:15+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Enno-Ai/ennodata-7b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model Enno-Ai/ennodata-7b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-27T05:41:02.798297 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and in the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of Enno-Ai/ennodata-7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Enno-Ai/ennodata-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-27T05:41:02.798297(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Enno-Ai/ennodata-7b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Enno-Ai/ennodata-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-27T05:41:02.798297(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 19, 31, 167, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Enno-Ai/ennodata-7b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Enno-Ai/ennodata-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-27T05:41:02.798297(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
2c92ab6b52b749503975dcdabbe64e23b116c877
# Dataset Card for Evaluation run of acrastt/Marx-3B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/acrastt/Marx-3B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [acrastt/Marx-3B](https://huggingface.co/acrastt/Marx-3B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_acrastt__Marx-3B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-13T00:16:16.228115](https://huggingface.co/datasets/open-llm-leaderboard/details_acrastt__Marx-3B/blob/main/results_2023-10-13T00-16-16.228115.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and in the "latest" split for each eval): ```python { "all": { "em": 0.0017827181208053692, "em_stderr": 0.0004320097346039128, "f1": 0.05222210570469818, "f1_stderr": 0.0012816188091647597, "acc": 0.33438429175196105, "acc_stderr": 0.00822951158575279 }, "harness|drop|3": { "em": 0.0017827181208053692, "em_stderr": 0.0004320097346039128, "f1": 0.05222210570469818, "f1_stderr": 0.0012816188091647597 }, "harness|gsm8k|5": { "acc": 0.01288855193328279, "acc_stderr": 0.003106901266499642 }, "harness|winogrande|5": { "acc": 0.6558800315706393, "acc_stderr": 0.013352121905005938 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
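As a complement, a hedged sketch of pulling the aggregated metrics through the `results` configuration described above. The repo id, config name, and `latest` alias come from this card, but the exact column layout of the results parquet is not documented here, so the final print is illustrative only:

```python
from datasets import load_dataset

# "results" stores the aggregated metrics of every run; its "latest"
# split corresponds to the 2023-10-13 run whose numbers are quoted above.
results = load_dataset(
    "open-llm-leaderboard/details_acrastt__Marx-3B",
    "results",
    split="latest",
)
print(results[0])  # column layout of the aggregated row is not specified here
```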
open-llm-leaderboard/details_acrastt__Marx-3B
[ "region:us" ]
2023-08-18T17:41:53+00:00
{"pretty_name": "Evaluation run of acrastt/Marx-3B", "dataset_summary": "Dataset automatically created during the evaluation run of model [acrastt/Marx-3B](https://huggingface.co/acrastt/Marx-3B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_acrastt__Marx-3B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-13T00:16:16.228115](https://huggingface.co/datasets/open-llm-leaderboard/details_acrastt__Marx-3B/blob/main/results_2023-10-13T00-16-16.228115.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0017827181208053692,\n \"em_stderr\": 0.0004320097346039128,\n \"f1\": 0.05222210570469818,\n \"f1_stderr\": 0.0012816188091647597,\n \"acc\": 0.33438429175196105,\n \"acc_stderr\": 0.00822951158575279\n },\n \"harness|drop|3\": {\n \"em\": 0.0017827181208053692,\n \"em_stderr\": 0.0004320097346039128,\n \"f1\": 0.05222210570469818,\n \"f1_stderr\": 0.0012816188091647597\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.01288855193328279,\n \"acc_stderr\": 0.003106901266499642\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6558800315706393,\n \"acc_stderr\": 0.013352121905005938\n }\n}\n```", "repo_url": "https://huggingface.co/acrastt/Marx-3B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|arc:challenge|25_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|arc:challenge|25_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|arc:challenge|25_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|arc:challenge|25_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_13T00_16_16.228115", "path": ["**/details_harness|drop|3_2023-10-13T00-16-16.228115.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-13T00-16-16.228115.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_13T00_16_16.228115", "path": ["**/details_harness|gsm8k|5_2023-10-13T00-16-16.228115.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-13T00-16-16.228115.parquet"]}]}, {"config_name": 
"harness_hellaswag_10", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hellaswag|10_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hellaswag|10_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hellaswag|10_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hellaswag|10_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T19:19:30.468267.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T19:19:30.468267.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T23:19:44.606324.parquet", 
"**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T23:19:44.606324.parquet", 
"**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T23:19:44.606324.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T23:46:31.661460.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T23:46:31.661460.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T00:59:52.593493.parquet", 
"**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T00:59:52.593493.parquet", 
"**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T00:59:52.593493.parquet", 
"**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T00:59:52.593493.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": 
"2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": 
["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T23:19:44.606324.parquet"]}, 
{"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", 
"data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": 
["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": 
["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": 
["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T00:59:52.593493.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": 
["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", 
"path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": 
["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T00:59:52.593493.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T00:59:52.593493.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_13T00_16_16.228115", "path": ["**/details_harness|winogrande|5_2023-10-13T00-16-16.228115.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-13T00-16-16.228115.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T19_19_30.468267", "path": ["results_2023-08-17T19:19:30.468267.parquet"]}, {"split": "2023_08_17T23_19_44.606324", "path": ["results_2023-08-17T23:19:44.606324.parquet"]}, {"split": "2023_08_17T23_46_31.661460", "path": ["results_2023-08-17T23:46:31.661460.parquet"]}, {"split": "2023_08_18T00_59_52.593493", "path": ["results_2023-08-18T00:59:52.593493.parquet"]}, {"split": "2023_10_13T00_16_16.228115", "path": ["results_2023-10-13T00-16-16.228115.parquet"]}, {"split": "latest", "path": ["results_2023-10-13T00-16-16.228115.parquet"]}]}]}
2023-10-12T23:16:29+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of acrastt/Marx-3B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model acrastt/Marx-3B on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-13T00:16:16.228115 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
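The loading snippet referenced in the card is not reproduced in this record. A minimal sketch, assuming the leaderboard's usual `details_<org>__<model>` repo naming and one of the config names listed in the metadata above (both assumptions, not confirmed by this record):

```python
from datasets import load_dataset

# Hypothetical repo id, following the Open LLM Leaderboard's
# details_<org>__<model> naming convention for evaluation-detail datasets.
data = load_dataset(
    "open-llm-leaderboard/details_acrastt__Marx-3B",
    "harness_winogrande_5",  # any config_name from the metadata above
    split="latest",          # or a timestamped split, e.g. "2023_10_13T00_16_16.228115"
)
```

Each timestamped split corresponds to one evaluation run; "latest" always resolves to the most recent one.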
[ "# Dataset Card for Evaluation run of acrastt/Marx-3B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model acrastt/Marx-3B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-13T00:16:16.228115(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of acrastt/Marx-3B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model acrastt/Marx-3B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-13T00:16:16.228115(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 17, 31, 165, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of acrastt/Marx-3B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model acrastt/Marx-3B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-13T00:16:16.228115(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
4ca9e19f765a4c72b29789fa9da1e77dbd6aabab
# Dataset Card for "Spirit_GPTNEO_Finetuned" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
EgilKarlsen/Spirit_GPTNEO_Finetuned
[ "region:us" ]
2023-08-18T17:41:54+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "0", "dtype": "float32"}, {"name": "1", "dtype": "float32"}, {"name": "2", "dtype": "float32"}, {"name": "3", "dtype": "float32"}, {"name": "4", "dtype": "float32"}, {"name": "5", "dtype": "float32"}, {"name": "6", "dtype": "float32"}, {"name": "7", "dtype": "float32"}, {"name": "8", "dtype": "float32"}, {"name": "9", "dtype": "float32"}, {"name": "10", "dtype": "float32"}, {"name": "11", "dtype": "float32"}, {"name": "12", "dtype": "float32"}, {"name": "13", "dtype": "float32"}, {"name": "14", "dtype": "float32"}, {"name": "15", "dtype": "float32"}, {"name": "16", "dtype": "float32"}, {"name": "17", "dtype": "float32"}, {"name": "18", "dtype": "float32"}, {"name": "19", "dtype": "float32"}, {"name": "20", "dtype": "float32"}, {"name": "21", "dtype": "float32"}, {"name": "22", "dtype": "float32"}, {"name": "23", "dtype": "float32"}, {"name": "24", "dtype": "float32"}, {"name": "25", "dtype": "float32"}, {"name": "26", "dtype": "float32"}, {"name": "27", "dtype": "float32"}, {"name": "28", "dtype": "float32"}, {"name": "29", "dtype": "float32"}, {"name": "30", "dtype": "float32"}, {"name": "31", "dtype": "float32"}, {"name": "32", "dtype": "float32"}, {"name": "33", "dtype": "float32"}, {"name": "34", "dtype": "float32"}, {"name": "35", "dtype": "float32"}, {"name": "36", "dtype": "float32"}, {"name": "37", "dtype": "float32"}, {"name": "38", "dtype": "float32"}, {"name": "39", "dtype": "float32"}, {"name": "40", "dtype": "float32"}, {"name": "41", "dtype": "float32"}, {"name": "42", "dtype": "float32"}, {"name": "43", "dtype": "float32"}, {"name": "44", "dtype": "float32"}, {"name": "45", "dtype": "float32"}, {"name": "46", "dtype": "float32"}, {"name": "47", "dtype": "float32"}, {"name": "48", "dtype": "float32"}, {"name": "49", "dtype": "float32"}, {"name": "50", "dtype": "float32"}, {"name": "51", "dtype": "float32"}, {"name": "52", "dtype": "float32"}, {"name": "53", "dtype": "float32"}, {"name": "54", "dtype": "float32"}, {"name": "55", "dtype": "float32"}, {"name": "56", "dtype": "float32"}, {"name": "57", "dtype": "float32"}, {"name": "58", "dtype": "float32"}, {"name": "59", "dtype": "float32"}, {"name": "60", "dtype": "float32"}, {"name": "61", "dtype": "float32"}, {"name": "62", "dtype": "float32"}, {"name": "63", "dtype": "float32"}, {"name": "64", "dtype": "float32"}, {"name": "65", "dtype": "float32"}, {"name": "66", "dtype": "float32"}, {"name": "67", "dtype": "float32"}, {"name": "68", "dtype": "float32"}, {"name": "69", "dtype": "float32"}, {"name": "70", "dtype": "float32"}, {"name": "71", "dtype": "float32"}, {"name": "72", "dtype": "float32"}, {"name": "73", "dtype": "float32"}, {"name": "74", "dtype": "float32"}, {"name": "75", "dtype": "float32"}, {"name": "76", "dtype": "float32"}, {"name": "77", "dtype": "float32"}, {"name": "78", "dtype": "float32"}, {"name": "79", "dtype": "float32"}, {"name": "80", "dtype": "float32"}, {"name": "81", "dtype": "float32"}, {"name": "82", "dtype": "float32"}, {"name": "83", "dtype": "float32"}, {"name": "84", "dtype": "float32"}, {"name": "85", "dtype": "float32"}, {"name": "86", "dtype": "float32"}, {"name": "87", "dtype": "float32"}, {"name": "88", "dtype": "float32"}, {"name": "89", "dtype": "float32"}, {"name": "90", "dtype": "float32"}, {"name": "91", "dtype": "float32"}, {"name": "92", "dtype": "float32"}, {"name": "93", "dtype": "float32"}, 
{"name": "94", "dtype": "float32"}, {"name": "95", "dtype": "float32"}, {"name": "96", "dtype": "float32"}, {"name": "97", "dtype": "float32"}, {"name": "98", "dtype": "float32"}, {"name": "99", "dtype": "float32"}, {"name": "100", "dtype": "float32"}, {"name": "101", "dtype": "float32"}, {"name": "102", "dtype": "float32"}, {"name": "103", "dtype": "float32"}, {"name": "104", "dtype": "float32"}, {"name": "105", "dtype": "float32"}, {"name": "106", "dtype": "float32"}, {"name": "107", "dtype": "float32"}, {"name": "108", "dtype": "float32"}, {"name": "109", "dtype": "float32"}, {"name": "110", "dtype": "float32"}, {"name": "111", "dtype": "float32"}, {"name": "112", "dtype": "float32"}, {"name": "113", "dtype": "float32"}, {"name": "114", "dtype": "float32"}, {"name": "115", "dtype": "float32"}, {"name": "116", "dtype": "float32"}, {"name": "117", "dtype": "float32"}, {"name": "118", "dtype": "float32"}, {"name": "119", "dtype": "float32"}, {"name": "120", "dtype": "float32"}, {"name": "121", "dtype": "float32"}, {"name": "122", "dtype": "float32"}, {"name": "123", "dtype": "float32"}, {"name": "124", "dtype": "float32"}, {"name": "125", "dtype": "float32"}, {"name": "126", "dtype": "float32"}, {"name": "127", "dtype": "float32"}, {"name": "128", "dtype": "float32"}, {"name": "129", "dtype": "float32"}, {"name": "130", "dtype": "float32"}, {"name": "131", "dtype": "float32"}, {"name": "132", "dtype": "float32"}, {"name": "133", "dtype": "float32"}, {"name": "134", "dtype": "float32"}, {"name": "135", "dtype": "float32"}, {"name": "136", "dtype": "float32"}, {"name": "137", "dtype": "float32"}, {"name": "138", "dtype": "float32"}, {"name": "139", "dtype": "float32"}, {"name": "140", "dtype": "float32"}, {"name": "141", "dtype": "float32"}, {"name": "142", "dtype": "float32"}, {"name": "143", "dtype": "float32"}, {"name": "144", "dtype": "float32"}, {"name": "145", "dtype": "float32"}, {"name": "146", "dtype": "float32"}, {"name": "147", "dtype": "float32"}, {"name": "148", "dtype": "float32"}, {"name": "149", "dtype": "float32"}, {"name": "150", "dtype": "float32"}, {"name": "151", "dtype": "float32"}, {"name": "152", "dtype": "float32"}, {"name": "153", "dtype": "float32"}, {"name": "154", "dtype": "float32"}, {"name": "155", "dtype": "float32"}, {"name": "156", "dtype": "float32"}, {"name": "157", "dtype": "float32"}, {"name": "158", "dtype": "float32"}, {"name": "159", "dtype": "float32"}, {"name": "160", "dtype": "float32"}, {"name": "161", "dtype": "float32"}, {"name": "162", "dtype": "float32"}, {"name": "163", "dtype": "float32"}, {"name": "164", "dtype": "float32"}, {"name": "165", "dtype": "float32"}, {"name": "166", "dtype": "float32"}, {"name": "167", "dtype": "float32"}, {"name": "168", "dtype": "float32"}, {"name": "169", "dtype": "float32"}, {"name": "170", "dtype": "float32"}, {"name": "171", "dtype": "float32"}, {"name": "172", "dtype": "float32"}, {"name": "173", "dtype": "float32"}, {"name": "174", "dtype": "float32"}, {"name": "175", "dtype": "float32"}, {"name": "176", "dtype": "float32"}, {"name": "177", "dtype": "float32"}, {"name": "178", "dtype": "float32"}, {"name": "179", "dtype": "float32"}, {"name": "180", "dtype": "float32"}, {"name": "181", "dtype": "float32"}, {"name": "182", "dtype": "float32"}, {"name": "183", "dtype": "float32"}, {"name": "184", "dtype": "float32"}, {"name": "185", "dtype": "float32"}, {"name": "186", "dtype": "float32"}, {"name": "187", "dtype": "float32"}, {"name": "188", "dtype": "float32"}, {"name": "189", "dtype": "float32"}, {"name": 
"190", "dtype": "float32"}, {"name": "191", "dtype": "float32"}, {"name": "192", "dtype": "float32"}, {"name": "193", "dtype": "float32"}, {"name": "194", "dtype": "float32"}, {"name": "195", "dtype": "float32"}, {"name": "196", "dtype": "float32"}, {"name": "197", "dtype": "float32"}, {"name": "198", "dtype": "float32"}, {"name": "199", "dtype": "float32"}, {"name": "200", "dtype": "float32"}, {"name": "201", "dtype": "float32"}, {"name": "202", "dtype": "float32"}, {"name": "203", "dtype": "float32"}, {"name": "204", "dtype": "float32"}, {"name": "205", "dtype": "float32"}, {"name": "206", "dtype": "float32"}, {"name": "207", "dtype": "float32"}, {"name": "208", "dtype": "float32"}, {"name": "209", "dtype": "float32"}, {"name": "210", "dtype": "float32"}, {"name": "211", "dtype": "float32"}, {"name": "212", "dtype": "float32"}, {"name": "213", "dtype": "float32"}, {"name": "214", "dtype": "float32"}, {"name": "215", "dtype": "float32"}, {"name": "216", "dtype": "float32"}, {"name": "217", "dtype": "float32"}, {"name": "218", "dtype": "float32"}, {"name": "219", "dtype": "float32"}, {"name": "220", "dtype": "float32"}, {"name": "221", "dtype": "float32"}, {"name": "222", "dtype": "float32"}, {"name": "223", "dtype": "float32"}, {"name": "224", "dtype": "float32"}, {"name": "225", "dtype": "float32"}, {"name": "226", "dtype": "float32"}, {"name": "227", "dtype": "float32"}, {"name": "228", "dtype": "float32"}, {"name": "229", "dtype": "float32"}, {"name": "230", "dtype": "float32"}, {"name": "231", "dtype": "float32"}, {"name": "232", "dtype": "float32"}, {"name": "233", "dtype": "float32"}, {"name": "234", "dtype": "float32"}, {"name": "235", "dtype": "float32"}, {"name": "236", "dtype": "float32"}, {"name": "237", "dtype": "float32"}, {"name": "238", "dtype": "float32"}, {"name": "239", "dtype": "float32"}, {"name": "240", "dtype": "float32"}, {"name": "241", "dtype": "float32"}, {"name": "242", "dtype": "float32"}, {"name": "243", "dtype": "float32"}, {"name": "244", "dtype": "float32"}, {"name": "245", "dtype": "float32"}, {"name": "246", "dtype": "float32"}, {"name": "247", "dtype": "float32"}, {"name": "248", "dtype": "float32"}, {"name": "249", "dtype": "float32"}, {"name": "250", "dtype": "float32"}, {"name": "251", "dtype": "float32"}, {"name": "252", "dtype": "float32"}, {"name": "253", "dtype": "float32"}, {"name": "254", "dtype": "float32"}, {"name": "255", "dtype": "float32"}, {"name": "256", "dtype": "float32"}, {"name": "257", "dtype": "float32"}, {"name": "258", "dtype": "float32"}, {"name": "259", "dtype": "float32"}, {"name": "260", "dtype": "float32"}, {"name": "261", "dtype": "float32"}, {"name": "262", "dtype": "float32"}, {"name": "263", "dtype": "float32"}, {"name": "264", "dtype": "float32"}, {"name": "265", "dtype": "float32"}, {"name": "266", "dtype": "float32"}, {"name": "267", "dtype": "float32"}, {"name": "268", "dtype": "float32"}, {"name": "269", "dtype": "float32"}, {"name": "270", "dtype": "float32"}, {"name": "271", "dtype": "float32"}, {"name": "272", "dtype": "float32"}, {"name": "273", "dtype": "float32"}, {"name": "274", "dtype": "float32"}, {"name": "275", "dtype": "float32"}, {"name": "276", "dtype": "float32"}, {"name": "277", "dtype": "float32"}, {"name": "278", "dtype": "float32"}, {"name": "279", "dtype": "float32"}, {"name": "280", "dtype": "float32"}, {"name": "281", "dtype": "float32"}, {"name": "282", "dtype": "float32"}, {"name": "283", "dtype": "float32"}, {"name": "284", "dtype": "float32"}, {"name": "285", "dtype": "float32"}, {"name": 
"286", "dtype": "float32"}, {"name": "287", "dtype": "float32"}, {"name": "288", "dtype": "float32"}, {"name": "289", "dtype": "float32"}, {"name": "290", "dtype": "float32"}, {"name": "291", "dtype": "float32"}, {"name": "292", "dtype": "float32"}, {"name": "293", "dtype": "float32"}, {"name": "294", "dtype": "float32"}, {"name": "295", "dtype": "float32"}, {"name": "296", "dtype": "float32"}, {"name": "297", "dtype": "float32"}, {"name": "298", "dtype": "float32"}, {"name": "299", "dtype": "float32"}, {"name": "300", "dtype": "float32"}, {"name": "301", "dtype": "float32"}, {"name": "302", "dtype": "float32"}, {"name": "303", "dtype": "float32"}, {"name": "304", "dtype": "float32"}, {"name": "305", "dtype": "float32"}, {"name": "306", "dtype": "float32"}, {"name": "307", "dtype": "float32"}, {"name": "308", "dtype": "float32"}, {"name": "309", "dtype": "float32"}, {"name": "310", "dtype": "float32"}, {"name": "311", "dtype": "float32"}, {"name": "312", "dtype": "float32"}, {"name": "313", "dtype": "float32"}, {"name": "314", "dtype": "float32"}, {"name": "315", "dtype": "float32"}, {"name": "316", "dtype": "float32"}, {"name": "317", "dtype": "float32"}, {"name": "318", "dtype": "float32"}, {"name": "319", "dtype": "float32"}, {"name": "320", "dtype": "float32"}, {"name": "321", "dtype": "float32"}, {"name": "322", "dtype": "float32"}, {"name": "323", "dtype": "float32"}, {"name": "324", "dtype": "float32"}, {"name": "325", "dtype": "float32"}, {"name": "326", "dtype": "float32"}, {"name": "327", "dtype": "float32"}, {"name": "328", "dtype": "float32"}, {"name": "329", "dtype": "float32"}, {"name": "330", "dtype": "float32"}, {"name": "331", "dtype": "float32"}, {"name": "332", "dtype": "float32"}, {"name": "333", "dtype": "float32"}, {"name": "334", "dtype": "float32"}, {"name": "335", "dtype": "float32"}, {"name": "336", "dtype": "float32"}, {"name": "337", "dtype": "float32"}, {"name": "338", "dtype": "float32"}, {"name": "339", "dtype": "float32"}, {"name": "340", "dtype": "float32"}, {"name": "341", "dtype": "float32"}, {"name": "342", "dtype": "float32"}, {"name": "343", "dtype": "float32"}, {"name": "344", "dtype": "float32"}, {"name": "345", "dtype": "float32"}, {"name": "346", "dtype": "float32"}, {"name": "347", "dtype": "float32"}, {"name": "348", "dtype": "float32"}, {"name": "349", "dtype": "float32"}, {"name": "350", "dtype": "float32"}, {"name": "351", "dtype": "float32"}, {"name": "352", "dtype": "float32"}, {"name": "353", "dtype": "float32"}, {"name": "354", "dtype": "float32"}, {"name": "355", "dtype": "float32"}, {"name": "356", "dtype": "float32"}, {"name": "357", "dtype": "float32"}, {"name": "358", "dtype": "float32"}, {"name": "359", "dtype": "float32"}, {"name": "360", "dtype": "float32"}, {"name": "361", "dtype": "float32"}, {"name": "362", "dtype": "float32"}, {"name": "363", "dtype": "float32"}, {"name": "364", "dtype": "float32"}, {"name": "365", "dtype": "float32"}, {"name": "366", "dtype": "float32"}, {"name": "367", "dtype": "float32"}, {"name": "368", "dtype": "float32"}, {"name": "369", "dtype": "float32"}, {"name": "370", "dtype": "float32"}, {"name": "371", "dtype": "float32"}, {"name": "372", "dtype": "float32"}, {"name": "373", "dtype": "float32"}, {"name": "374", "dtype": "float32"}, {"name": "375", "dtype": "float32"}, {"name": "376", "dtype": "float32"}, {"name": "377", "dtype": "float32"}, {"name": "378", "dtype": "float32"}, {"name": "379", "dtype": "float32"}, {"name": "380", "dtype": "float32"}, {"name": "381", "dtype": "float32"}, {"name": 
"382", "dtype": "float32"}, {"name": "383", "dtype": "float32"}, {"name": "384", "dtype": "float32"}, {"name": "385", "dtype": "float32"}, {"name": "386", "dtype": "float32"}, {"name": "387", "dtype": "float32"}, {"name": "388", "dtype": "float32"}, {"name": "389", "dtype": "float32"}, {"name": "390", "dtype": "float32"}, {"name": "391", "dtype": "float32"}, {"name": "392", "dtype": "float32"}, {"name": "393", "dtype": "float32"}, {"name": "394", "dtype": "float32"}, {"name": "395", "dtype": "float32"}, {"name": "396", "dtype": "float32"}, {"name": "397", "dtype": "float32"}, {"name": "398", "dtype": "float32"}, {"name": "399", "dtype": "float32"}, {"name": "400", "dtype": "float32"}, {"name": "401", "dtype": "float32"}, {"name": "402", "dtype": "float32"}, {"name": "403", "dtype": "float32"}, {"name": "404", "dtype": "float32"}, {"name": "405", "dtype": "float32"}, {"name": "406", "dtype": "float32"}, {"name": "407", "dtype": "float32"}, {"name": "408", "dtype": "float32"}, {"name": "409", "dtype": "float32"}, {"name": "410", "dtype": "float32"}, {"name": "411", "dtype": "float32"}, {"name": "412", "dtype": "float32"}, {"name": "413", "dtype": "float32"}, {"name": "414", "dtype": "float32"}, {"name": "415", "dtype": "float32"}, {"name": "416", "dtype": "float32"}, {"name": "417", "dtype": "float32"}, {"name": "418", "dtype": "float32"}, {"name": "419", "dtype": "float32"}, {"name": "420", "dtype": "float32"}, {"name": "421", "dtype": "float32"}, {"name": "422", "dtype": "float32"}, {"name": "423", "dtype": "float32"}, {"name": "424", "dtype": "float32"}, {"name": "425", "dtype": "float32"}, {"name": "426", "dtype": "float32"}, {"name": "427", "dtype": "float32"}, {"name": "428", "dtype": "float32"}, {"name": "429", "dtype": "float32"}, {"name": "430", "dtype": "float32"}, {"name": "431", "dtype": "float32"}, {"name": "432", "dtype": "float32"}, {"name": "433", "dtype": "float32"}, {"name": "434", "dtype": "float32"}, {"name": "435", "dtype": "float32"}, {"name": "436", "dtype": "float32"}, {"name": "437", "dtype": "float32"}, {"name": "438", "dtype": "float32"}, {"name": "439", "dtype": "float32"}, {"name": "440", "dtype": "float32"}, {"name": "441", "dtype": "float32"}, {"name": "442", "dtype": "float32"}, {"name": "443", "dtype": "float32"}, {"name": "444", "dtype": "float32"}, {"name": "445", "dtype": "float32"}, {"name": "446", "dtype": "float32"}, {"name": "447", "dtype": "float32"}, {"name": "448", "dtype": "float32"}, {"name": "449", "dtype": "float32"}, {"name": "450", "dtype": "float32"}, {"name": "451", "dtype": "float32"}, {"name": "452", "dtype": "float32"}, {"name": "453", "dtype": "float32"}, {"name": "454", "dtype": "float32"}, {"name": "455", "dtype": "float32"}, {"name": "456", "dtype": "float32"}, {"name": "457", "dtype": "float32"}, {"name": "458", "dtype": "float32"}, {"name": "459", "dtype": "float32"}, {"name": "460", "dtype": "float32"}, {"name": "461", "dtype": "float32"}, {"name": "462", "dtype": "float32"}, {"name": "463", "dtype": "float32"}, {"name": "464", "dtype": "float32"}, {"name": "465", "dtype": "float32"}, {"name": "466", "dtype": "float32"}, {"name": "467", "dtype": "float32"}, {"name": "468", "dtype": "float32"}, {"name": "469", "dtype": "float32"}, {"name": "470", "dtype": "float32"}, {"name": "471", "dtype": "float32"}, {"name": "472", "dtype": "float32"}, {"name": "473", "dtype": "float32"}, {"name": "474", "dtype": "float32"}, {"name": "475", "dtype": "float32"}, {"name": "476", "dtype": "float32"}, {"name": "477", "dtype": "float32"}, {"name": 
"478", "dtype": "float32"}, {"name": "479", "dtype": "float32"}, {"name": "480", "dtype": "float32"}, {"name": "481", "dtype": "float32"}, {"name": "482", "dtype": "float32"}, {"name": "483", "dtype": "float32"}, {"name": "484", "dtype": "float32"}, {"name": "485", "dtype": "float32"}, {"name": "486", "dtype": "float32"}, {"name": "487", "dtype": "float32"}, {"name": "488", "dtype": "float32"}, {"name": "489", "dtype": "float32"}, {"name": "490", "dtype": "float32"}, {"name": "491", "dtype": "float32"}, {"name": "492", "dtype": "float32"}, {"name": "493", "dtype": "float32"}, {"name": "494", "dtype": "float32"}, {"name": "495", "dtype": "float32"}, {"name": "496", "dtype": "float32"}, {"name": "497", "dtype": "float32"}, {"name": "498", "dtype": "float32"}, {"name": "499", "dtype": "float32"}, {"name": "500", "dtype": "float32"}, {"name": "501", "dtype": "float32"}, {"name": "502", "dtype": "float32"}, {"name": "503", "dtype": "float32"}, {"name": "504", "dtype": "float32"}, {"name": "505", "dtype": "float32"}, {"name": "506", "dtype": "float32"}, {"name": "507", "dtype": "float32"}, {"name": "508", "dtype": "float32"}, {"name": "509", "dtype": "float32"}, {"name": "510", "dtype": "float32"}, {"name": "511", "dtype": "float32"}, {"name": "512", "dtype": "float32"}, {"name": "513", "dtype": "float32"}, {"name": "514", "dtype": "float32"}, {"name": "515", "dtype": "float32"}, {"name": "516", "dtype": "float32"}, {"name": "517", "dtype": "float32"}, {"name": "518", "dtype": "float32"}, {"name": "519", "dtype": "float32"}, {"name": "520", "dtype": "float32"}, {"name": "521", "dtype": "float32"}, {"name": "522", "dtype": "float32"}, {"name": "523", "dtype": "float32"}, {"name": "524", "dtype": "float32"}, {"name": "525", "dtype": "float32"}, {"name": "526", "dtype": "float32"}, {"name": "527", "dtype": "float32"}, {"name": "528", "dtype": "float32"}, {"name": "529", "dtype": "float32"}, {"name": "530", "dtype": "float32"}, {"name": "531", "dtype": "float32"}, {"name": "532", "dtype": "float32"}, {"name": "533", "dtype": "float32"}, {"name": "534", "dtype": "float32"}, {"name": "535", "dtype": "float32"}, {"name": "536", "dtype": "float32"}, {"name": "537", "dtype": "float32"}, {"name": "538", "dtype": "float32"}, {"name": "539", "dtype": "float32"}, {"name": "540", "dtype": "float32"}, {"name": "541", "dtype": "float32"}, {"name": "542", "dtype": "float32"}, {"name": "543", "dtype": "float32"}, {"name": "544", "dtype": "float32"}, {"name": "545", "dtype": "float32"}, {"name": "546", "dtype": "float32"}, {"name": "547", "dtype": "float32"}, {"name": "548", "dtype": "float32"}, {"name": "549", "dtype": "float32"}, {"name": "550", "dtype": "float32"}, {"name": "551", "dtype": "float32"}, {"name": "552", "dtype": "float32"}, {"name": "553", "dtype": "float32"}, {"name": "554", "dtype": "float32"}, {"name": "555", "dtype": "float32"}, {"name": "556", "dtype": "float32"}, {"name": "557", "dtype": "float32"}, {"name": "558", "dtype": "float32"}, {"name": "559", "dtype": "float32"}, {"name": "560", "dtype": "float32"}, {"name": "561", "dtype": "float32"}, {"name": "562", "dtype": "float32"}, {"name": "563", "dtype": "float32"}, {"name": "564", "dtype": "float32"}, {"name": "565", "dtype": "float32"}, {"name": "566", "dtype": "float32"}, {"name": "567", "dtype": "float32"}, {"name": "568", "dtype": "float32"}, {"name": "569", "dtype": "float32"}, {"name": "570", "dtype": "float32"}, {"name": "571", "dtype": "float32"}, {"name": "572", "dtype": "float32"}, {"name": "573", "dtype": "float32"}, {"name": 
"574", "dtype": "float32"}, {"name": "575", "dtype": "float32"}, {"name": "576", "dtype": "float32"}, {"name": "577", "dtype": "float32"}, {"name": "578", "dtype": "float32"}, {"name": "579", "dtype": "float32"}, {"name": "580", "dtype": "float32"}, {"name": "581", "dtype": "float32"}, {"name": "582", "dtype": "float32"}, {"name": "583", "dtype": "float32"}, {"name": "584", "dtype": "float32"}, {"name": "585", "dtype": "float32"}, {"name": "586", "dtype": "float32"}, {"name": "587", "dtype": "float32"}, {"name": "588", "dtype": "float32"}, {"name": "589", "dtype": "float32"}, {"name": "590", "dtype": "float32"}, {"name": "591", "dtype": "float32"}, {"name": "592", "dtype": "float32"}, {"name": "593", "dtype": "float32"}, {"name": "594", "dtype": "float32"}, {"name": "595", "dtype": "float32"}, {"name": "596", "dtype": "float32"}, {"name": "597", "dtype": "float32"}, {"name": "598", "dtype": "float32"}, {"name": "599", "dtype": "float32"}, {"name": "600", "dtype": "float32"}, {"name": "601", "dtype": "float32"}, {"name": "602", "dtype": "float32"}, {"name": "603", "dtype": "float32"}, {"name": "604", "dtype": "float32"}, {"name": "605", "dtype": "float32"}, {"name": "606", "dtype": "float32"}, {"name": "607", "dtype": "float32"}, {"name": "608", "dtype": "float32"}, {"name": "609", "dtype": "float32"}, {"name": "610", "dtype": "float32"}, {"name": "611", "dtype": "float32"}, {"name": "612", "dtype": "float32"}, {"name": "613", "dtype": "float32"}, {"name": "614", "dtype": "float32"}, {"name": "615", "dtype": "float32"}, {"name": "616", "dtype": "float32"}, {"name": "617", "dtype": "float32"}, {"name": "618", "dtype": "float32"}, {"name": "619", "dtype": "float32"}, {"name": "620", "dtype": "float32"}, {"name": "621", "dtype": "float32"}, {"name": "622", "dtype": "float32"}, {"name": "623", "dtype": "float32"}, {"name": "624", "dtype": "float32"}, {"name": "625", "dtype": "float32"}, {"name": "626", "dtype": "float32"}, {"name": "627", "dtype": "float32"}, {"name": "628", "dtype": "float32"}, {"name": "629", "dtype": "float32"}, {"name": "630", "dtype": "float32"}, {"name": "631", "dtype": "float32"}, {"name": "632", "dtype": "float32"}, {"name": "633", "dtype": "float32"}, {"name": "634", "dtype": "float32"}, {"name": "635", "dtype": "float32"}, {"name": "636", "dtype": "float32"}, {"name": "637", "dtype": "float32"}, {"name": "638", "dtype": "float32"}, {"name": "639", "dtype": "float32"}, {"name": "640", "dtype": "float32"}, {"name": "641", "dtype": "float32"}, {"name": "642", "dtype": "float32"}, {"name": "643", "dtype": "float32"}, {"name": "644", "dtype": "float32"}, {"name": "645", "dtype": "float32"}, {"name": "646", "dtype": "float32"}, {"name": "647", "dtype": "float32"}, {"name": "648", "dtype": "float32"}, {"name": "649", "dtype": "float32"}, {"name": "650", "dtype": "float32"}, {"name": "651", "dtype": "float32"}, {"name": "652", "dtype": "float32"}, {"name": "653", "dtype": "float32"}, {"name": "654", "dtype": "float32"}, {"name": "655", "dtype": "float32"}, {"name": "656", "dtype": "float32"}, {"name": "657", "dtype": "float32"}, {"name": "658", "dtype": "float32"}, {"name": "659", "dtype": "float32"}, {"name": "660", "dtype": "float32"}, {"name": "661", "dtype": "float32"}, {"name": "662", "dtype": "float32"}, {"name": "663", "dtype": "float32"}, {"name": "664", "dtype": "float32"}, {"name": "665", "dtype": "float32"}, {"name": "666", "dtype": "float32"}, {"name": "667", "dtype": "float32"}, {"name": "668", "dtype": "float32"}, {"name": "669", "dtype": "float32"}, {"name": 
"670", "dtype": "float32"}, {"name": "671", "dtype": "float32"}, {"name": "672", "dtype": "float32"}, {"name": "673", "dtype": "float32"}, {"name": "674", "dtype": "float32"}, {"name": "675", "dtype": "float32"}, {"name": "676", "dtype": "float32"}, {"name": "677", "dtype": "float32"}, {"name": "678", "dtype": "float32"}, {"name": "679", "dtype": "float32"}, {"name": "680", "dtype": "float32"}, {"name": "681", "dtype": "float32"}, {"name": "682", "dtype": "float32"}, {"name": "683", "dtype": "float32"}, {"name": "684", "dtype": "float32"}, {"name": "685", "dtype": "float32"}, {"name": "686", "dtype": "float32"}, {"name": "687", "dtype": "float32"}, {"name": "688", "dtype": "float32"}, {"name": "689", "dtype": "float32"}, {"name": "690", "dtype": "float32"}, {"name": "691", "dtype": "float32"}, {"name": "692", "dtype": "float32"}, {"name": "693", "dtype": "float32"}, {"name": "694", "dtype": "float32"}, {"name": "695", "dtype": "float32"}, {"name": "696", "dtype": "float32"}, {"name": "697", "dtype": "float32"}, {"name": "698", "dtype": "float32"}, {"name": "699", "dtype": "float32"}, {"name": "700", "dtype": "float32"}, {"name": "701", "dtype": "float32"}, {"name": "702", "dtype": "float32"}, {"name": "703", "dtype": "float32"}, {"name": "704", "dtype": "float32"}, {"name": "705", "dtype": "float32"}, {"name": "706", "dtype": "float32"}, {"name": "707", "dtype": "float32"}, {"name": "708", "dtype": "float32"}, {"name": "709", "dtype": "float32"}, {"name": "710", "dtype": "float32"}, {"name": "711", "dtype": "float32"}, {"name": "712", "dtype": "float32"}, {"name": "713", "dtype": "float32"}, {"name": "714", "dtype": "float32"}, {"name": "715", "dtype": "float32"}, {"name": "716", "dtype": "float32"}, {"name": "717", "dtype": "float32"}, {"name": "718", "dtype": "float32"}, {"name": "719", "dtype": "float32"}, {"name": "720", "dtype": "float32"}, {"name": "721", "dtype": "float32"}, {"name": "722", "dtype": "float32"}, {"name": "723", "dtype": "float32"}, {"name": "724", "dtype": "float32"}, {"name": "725", "dtype": "float32"}, {"name": "726", "dtype": "float32"}, {"name": "727", "dtype": "float32"}, {"name": "728", "dtype": "float32"}, {"name": "729", "dtype": "float32"}, {"name": "730", "dtype": "float32"}, {"name": "731", "dtype": "float32"}, {"name": "732", "dtype": "float32"}, {"name": "733", "dtype": "float32"}, {"name": "734", "dtype": "float32"}, {"name": "735", "dtype": "float32"}, {"name": "736", "dtype": "float32"}, {"name": "737", "dtype": "float32"}, {"name": "738", "dtype": "float32"}, {"name": "739", "dtype": "float32"}, {"name": "740", "dtype": "float32"}, {"name": "741", "dtype": "float32"}, {"name": "742", "dtype": "float32"}, {"name": "743", "dtype": "float32"}, {"name": "744", "dtype": "float32"}, {"name": "745", "dtype": "float32"}, {"name": "746", "dtype": "float32"}, {"name": "747", "dtype": "float32"}, {"name": "748", "dtype": "float32"}, {"name": "749", "dtype": "float32"}, {"name": "750", "dtype": "float32"}, {"name": "751", "dtype": "float32"}, {"name": "752", "dtype": "float32"}, {"name": "753", "dtype": "float32"}, {"name": "754", "dtype": "float32"}, {"name": "755", "dtype": "float32"}, {"name": "756", "dtype": "float32"}, {"name": "757", "dtype": "float32"}, {"name": "758", "dtype": "float32"}, {"name": "759", "dtype": "float32"}, {"name": "760", "dtype": "float32"}, {"name": "761", "dtype": "float32"}, {"name": "762", "dtype": "float32"}, {"name": "763", "dtype": "float32"}, {"name": "764", "dtype": "float32"}, {"name": "765", "dtype": "float32"}, {"name": 
"766", "dtype": "float32"}, {"name": "767", "dtype": "float32"}, {"name": "768", "dtype": "float32"}, {"name": "769", "dtype": "float32"}, {"name": "770", "dtype": "float32"}, {"name": "771", "dtype": "float32"}, {"name": "772", "dtype": "float32"}, {"name": "773", "dtype": "float32"}, {"name": "774", "dtype": "float32"}, {"name": "775", "dtype": "float32"}, {"name": "776", "dtype": "float32"}, {"name": "777", "dtype": "float32"}, {"name": "778", "dtype": "float32"}, {"name": "779", "dtype": "float32"}, {"name": "780", "dtype": "float32"}, {"name": "781", "dtype": "float32"}, {"name": "782", "dtype": "float32"}, {"name": "783", "dtype": "float32"}, {"name": "784", "dtype": "float32"}, {"name": "785", "dtype": "float32"}, {"name": "786", "dtype": "float32"}, {"name": "787", "dtype": "float32"}, {"name": "788", "dtype": "float32"}, {"name": "789", "dtype": "float32"}, {"name": "790", "dtype": "float32"}, {"name": "791", "dtype": "float32"}, {"name": "792", "dtype": "float32"}, {"name": "793", "dtype": "float32"}, {"name": "794", "dtype": "float32"}, {"name": "795", "dtype": "float32"}, {"name": "796", "dtype": "float32"}, {"name": "797", "dtype": "float32"}, {"name": "798", "dtype": "float32"}, {"name": "799", "dtype": "float32"}, {"name": "800", "dtype": "float32"}, {"name": "801", "dtype": "float32"}, {"name": "802", "dtype": "float32"}, {"name": "803", "dtype": "float32"}, {"name": "804", "dtype": "float32"}, {"name": "805", "dtype": "float32"}, {"name": "806", "dtype": "float32"}, {"name": "807", "dtype": "float32"}, {"name": "808", "dtype": "float32"}, {"name": "809", "dtype": "float32"}, {"name": "810", "dtype": "float32"}, {"name": "811", "dtype": "float32"}, {"name": "812", "dtype": "float32"}, {"name": "813", "dtype": "float32"}, {"name": "814", "dtype": "float32"}, {"name": "815", "dtype": "float32"}, {"name": "816", "dtype": "float32"}, {"name": "817", "dtype": "float32"}, {"name": "818", "dtype": "float32"}, {"name": "819", "dtype": "float32"}, {"name": "820", "dtype": "float32"}, {"name": "821", "dtype": "float32"}, {"name": "822", "dtype": "float32"}, {"name": "823", "dtype": "float32"}, {"name": "824", "dtype": "float32"}, {"name": "825", "dtype": "float32"}, {"name": "826", "dtype": "float32"}, {"name": "827", "dtype": "float32"}, {"name": "828", "dtype": "float32"}, {"name": "829", "dtype": "float32"}, {"name": "830", "dtype": "float32"}, {"name": "831", "dtype": "float32"}, {"name": "832", "dtype": "float32"}, {"name": "833", "dtype": "float32"}, {"name": "834", "dtype": "float32"}, {"name": "835", "dtype": "float32"}, {"name": "836", "dtype": "float32"}, {"name": "837", "dtype": "float32"}, {"name": "838", "dtype": "float32"}, {"name": "839", "dtype": "float32"}, {"name": "840", "dtype": "float32"}, {"name": "841", "dtype": "float32"}, {"name": "842", "dtype": "float32"}, {"name": "843", "dtype": "float32"}, {"name": "844", "dtype": "float32"}, {"name": "845", "dtype": "float32"}, {"name": "846", "dtype": "float32"}, {"name": "847", "dtype": "float32"}, {"name": "848", "dtype": "float32"}, {"name": "849", "dtype": "float32"}, {"name": "850", "dtype": "float32"}, {"name": "851", "dtype": "float32"}, {"name": "852", "dtype": "float32"}, {"name": "853", "dtype": "float32"}, {"name": "854", "dtype": "float32"}, {"name": "855", "dtype": "float32"}, {"name": "856", "dtype": "float32"}, {"name": "857", "dtype": "float32"}, {"name": "858", "dtype": "float32"}, {"name": "859", "dtype": "float32"}, {"name": "860", "dtype": "float32"}, {"name": "861", "dtype": "float32"}, {"name": 
"862", "dtype": "float32"}, {"name": "863", "dtype": "float32"}, {"name": "864", "dtype": "float32"}, {"name": "865", "dtype": "float32"}, {"name": "866", "dtype": "float32"}, {"name": "867", "dtype": "float32"}, {"name": "868", "dtype": "float32"}, {"name": "869", "dtype": "float32"}, {"name": "870", "dtype": "float32"}, {"name": "871", "dtype": "float32"}, {"name": "872", "dtype": "float32"}, {"name": "873", "dtype": "float32"}, {"name": "874", "dtype": "float32"}, {"name": "875", "dtype": "float32"}, {"name": "876", "dtype": "float32"}, {"name": "877", "dtype": "float32"}, {"name": "878", "dtype": "float32"}, {"name": "879", "dtype": "float32"}, {"name": "880", "dtype": "float32"}, {"name": "881", "dtype": "float32"}, {"name": "882", "dtype": "float32"}, {"name": "883", "dtype": "float32"}, {"name": "884", "dtype": "float32"}, {"name": "885", "dtype": "float32"}, {"name": "886", "dtype": "float32"}, {"name": "887", "dtype": "float32"}, {"name": "888", "dtype": "float32"}, {"name": "889", "dtype": "float32"}, {"name": "890", "dtype": "float32"}, {"name": "891", "dtype": "float32"}, {"name": "892", "dtype": "float32"}, {"name": "893", "dtype": "float32"}, {"name": "894", "dtype": "float32"}, {"name": "895", "dtype": "float32"}, {"name": "896", "dtype": "float32"}, {"name": "897", "dtype": "float32"}, {"name": "898", "dtype": "float32"}, {"name": "899", "dtype": "float32"}, {"name": "900", "dtype": "float32"}, {"name": "901", "dtype": "float32"}, {"name": "902", "dtype": "float32"}, {"name": "903", "dtype": "float32"}, {"name": "904", "dtype": "float32"}, {"name": "905", "dtype": "float32"}, {"name": "906", "dtype": "float32"}, {"name": "907", "dtype": "float32"}, {"name": "908", "dtype": "float32"}, {"name": "909", "dtype": "float32"}, {"name": "910", "dtype": "float32"}, {"name": "911", "dtype": "float32"}, {"name": "912", "dtype": "float32"}, {"name": "913", "dtype": "float32"}, {"name": "914", "dtype": "float32"}, {"name": "915", "dtype": "float32"}, {"name": "916", "dtype": "float32"}, {"name": "917", "dtype": "float32"}, {"name": "918", "dtype": "float32"}, {"name": "919", "dtype": "float32"}, {"name": "920", "dtype": "float32"}, {"name": "921", "dtype": "float32"}, {"name": "922", "dtype": "float32"}, {"name": "923", "dtype": "float32"}, {"name": "924", "dtype": "float32"}, {"name": "925", "dtype": "float32"}, {"name": "926", "dtype": "float32"}, {"name": "927", "dtype": "float32"}, {"name": "928", "dtype": "float32"}, {"name": "929", "dtype": "float32"}, {"name": "930", "dtype": "float32"}, {"name": "931", "dtype": "float32"}, {"name": "932", "dtype": "float32"}, {"name": "933", "dtype": "float32"}, {"name": "934", "dtype": "float32"}, {"name": "935", "dtype": "float32"}, {"name": "936", "dtype": "float32"}, {"name": "937", "dtype": "float32"}, {"name": "938", "dtype": "float32"}, {"name": "939", "dtype": "float32"}, {"name": "940", "dtype": "float32"}, {"name": "941", "dtype": "float32"}, {"name": "942", "dtype": "float32"}, {"name": "943", "dtype": "float32"}, {"name": "944", "dtype": "float32"}, {"name": "945", "dtype": "float32"}, {"name": "946", "dtype": "float32"}, {"name": "947", "dtype": "float32"}, {"name": "948", "dtype": "float32"}, {"name": "949", "dtype": "float32"}, {"name": "950", "dtype": "float32"}, {"name": "951", "dtype": "float32"}, {"name": "952", "dtype": "float32"}, {"name": "953", "dtype": "float32"}, {"name": "954", "dtype": "float32"}, {"name": "955", "dtype": "float32"}, {"name": "956", "dtype": "float32"}, {"name": "957", "dtype": "float32"}, {"name": 
"958", "dtype": "float32"}, {"name": "959", "dtype": "float32"}, {"name": "960", "dtype": "float32"}, {"name": "961", "dtype": "float32"}, {"name": "962", "dtype": "float32"}, {"name": "963", "dtype": "float32"}, {"name": "964", "dtype": "float32"}, {"name": "965", "dtype": "float32"}, {"name": "966", "dtype": "float32"}, {"name": "967", "dtype": "float32"}, {"name": "968", "dtype": "float32"}, {"name": "969", "dtype": "float32"}, {"name": "970", "dtype": "float32"}, {"name": "971", "dtype": "float32"}, {"name": "972", "dtype": "float32"}, {"name": "973", "dtype": "float32"}, {"name": "974", "dtype": "float32"}, {"name": "975", "dtype": "float32"}, {"name": "976", "dtype": "float32"}, {"name": "977", "dtype": "float32"}, {"name": "978", "dtype": "float32"}, {"name": "979", "dtype": "float32"}, {"name": "980", "dtype": "float32"}, {"name": "981", "dtype": "float32"}, {"name": "982", "dtype": "float32"}, {"name": "983", "dtype": "float32"}, {"name": "984", "dtype": "float32"}, {"name": "985", "dtype": "float32"}, {"name": "986", "dtype": "float32"}, {"name": "987", "dtype": "float32"}, {"name": "988", "dtype": "float32"}, {"name": "989", "dtype": "float32"}, {"name": "990", "dtype": "float32"}, {"name": "991", "dtype": "float32"}, {"name": "992", "dtype": "float32"}, {"name": "993", "dtype": "float32"}, {"name": "994", "dtype": "float32"}, {"name": "995", "dtype": "float32"}, {"name": "996", "dtype": "float32"}, {"name": "997", "dtype": "float32"}, {"name": "998", "dtype": "float32"}, {"name": "999", "dtype": "float32"}, {"name": "1000", "dtype": "float32"}, {"name": "1001", "dtype": "float32"}, {"name": "1002", "dtype": "float32"}, {"name": "1003", "dtype": "float32"}, {"name": "1004", "dtype": "float32"}, {"name": "1005", "dtype": "float32"}, {"name": "1006", "dtype": "float32"}, {"name": "1007", "dtype": "float32"}, {"name": "1008", "dtype": "float32"}, {"name": "1009", "dtype": "float32"}, {"name": "1010", "dtype": "float32"}, {"name": "1011", "dtype": "float32"}, {"name": "1012", "dtype": "float32"}, {"name": "1013", "dtype": "float32"}, {"name": "1014", "dtype": "float32"}, {"name": "1015", "dtype": "float32"}, {"name": "1016", "dtype": "float32"}, {"name": "1017", "dtype": "float32"}, {"name": "1018", "dtype": "float32"}, {"name": "1019", "dtype": "float32"}, {"name": "1020", "dtype": "float32"}, {"name": "1021", "dtype": "float32"}, {"name": "1022", "dtype": "float32"}, {"name": "1023", "dtype": "float32"}, {"name": "1024", "dtype": "float32"}, {"name": "1025", "dtype": "float32"}, {"name": "1026", "dtype": "float32"}, {"name": "1027", "dtype": "float32"}, {"name": "1028", "dtype": "float32"}, {"name": "1029", "dtype": "float32"}, {"name": "1030", "dtype": "float32"}, {"name": "1031", "dtype": "float32"}, {"name": "1032", "dtype": "float32"}, {"name": "1033", "dtype": "float32"}, {"name": "1034", "dtype": "float32"}, {"name": "1035", "dtype": "float32"}, {"name": "1036", "dtype": "float32"}, {"name": "1037", "dtype": "float32"}, {"name": "1038", "dtype": "float32"}, {"name": "1039", "dtype": "float32"}, {"name": "1040", "dtype": "float32"}, {"name": "1041", "dtype": "float32"}, {"name": "1042", "dtype": "float32"}, {"name": "1043", "dtype": "float32"}, {"name": "1044", "dtype": "float32"}, {"name": "1045", "dtype": "float32"}, {"name": "1046", "dtype": "float32"}, {"name": "1047", "dtype": "float32"}, {"name": "1048", "dtype": "float32"}, {"name": "1049", "dtype": "float32"}, {"name": "1050", "dtype": "float32"}, {"name": "1051", "dtype": "float32"}, {"name": "1052", "dtype": 
"float32"}, {"name": "1053", "dtype": "float32"}, {"name": "1054", "dtype": "float32"}, {"name": "1055", "dtype": "float32"}, {"name": "1056", "dtype": "float32"}, {"name": "1057", "dtype": "float32"}, {"name": "1058", "dtype": "float32"}, {"name": "1059", "dtype": "float32"}, {"name": "1060", "dtype": "float32"}, {"name": "1061", "dtype": "float32"}, {"name": "1062", "dtype": "float32"}, {"name": "1063", "dtype": "float32"}, {"name": "1064", "dtype": "float32"}, {"name": "1065", "dtype": "float32"}, {"name": "1066", "dtype": "float32"}, {"name": "1067", "dtype": "float32"}, {"name": "1068", "dtype": "float32"}, {"name": "1069", "dtype": "float32"}, {"name": "1070", "dtype": "float32"}, {"name": "1071", "dtype": "float32"}, {"name": "1072", "dtype": "float32"}, {"name": "1073", "dtype": "float32"}, {"name": "1074", "dtype": "float32"}, {"name": "1075", "dtype": "float32"}, {"name": "1076", "dtype": "float32"}, {"name": "1077", "dtype": "float32"}, {"name": "1078", "dtype": "float32"}, {"name": "1079", "dtype": "float32"}, {"name": "1080", "dtype": "float32"}, {"name": "1081", "dtype": "float32"}, {"name": "1082", "dtype": "float32"}, {"name": "1083", "dtype": "float32"}, {"name": "1084", "dtype": "float32"}, {"name": "1085", "dtype": "float32"}, {"name": "1086", "dtype": "float32"}, {"name": "1087", "dtype": "float32"}, {"name": "1088", "dtype": "float32"}, {"name": "1089", "dtype": "float32"}, {"name": "1090", "dtype": "float32"}, {"name": "1091", "dtype": "float32"}, {"name": "1092", "dtype": "float32"}, {"name": "1093", "dtype": "float32"}, {"name": "1094", "dtype": "float32"}, {"name": "1095", "dtype": "float32"}, {"name": "1096", "dtype": "float32"}, {"name": "1097", "dtype": "float32"}, {"name": "1098", "dtype": "float32"}, {"name": "1099", "dtype": "float32"}, {"name": "1100", "dtype": "float32"}, {"name": "1101", "dtype": "float32"}, {"name": "1102", "dtype": "float32"}, {"name": "1103", "dtype": "float32"}, {"name": "1104", "dtype": "float32"}, {"name": "1105", "dtype": "float32"}, {"name": "1106", "dtype": "float32"}, {"name": "1107", "dtype": "float32"}, {"name": "1108", "dtype": "float32"}, {"name": "1109", "dtype": "float32"}, {"name": "1110", "dtype": "float32"}, {"name": "1111", "dtype": "float32"}, {"name": "1112", "dtype": "float32"}, {"name": "1113", "dtype": "float32"}, {"name": "1114", "dtype": "float32"}, {"name": "1115", "dtype": "float32"}, {"name": "1116", "dtype": "float32"}, {"name": "1117", "dtype": "float32"}, {"name": "1118", "dtype": "float32"}, {"name": "1119", "dtype": "float32"}, {"name": "1120", "dtype": "float32"}, {"name": "1121", "dtype": "float32"}, {"name": "1122", "dtype": "float32"}, {"name": "1123", "dtype": "float32"}, {"name": "1124", "dtype": "float32"}, {"name": "1125", "dtype": "float32"}, {"name": "1126", "dtype": "float32"}, {"name": "1127", "dtype": "float32"}, {"name": "1128", "dtype": "float32"}, {"name": "1129", "dtype": "float32"}, {"name": "1130", "dtype": "float32"}, {"name": "1131", "dtype": "float32"}, {"name": "1132", "dtype": "float32"}, {"name": "1133", "dtype": "float32"}, {"name": "1134", "dtype": "float32"}, {"name": "1135", "dtype": "float32"}, {"name": "1136", "dtype": "float32"}, {"name": "1137", "dtype": "float32"}, {"name": "1138", "dtype": "float32"}, {"name": "1139", "dtype": "float32"}, {"name": "1140", "dtype": "float32"}, {"name": "1141", "dtype": "float32"}, {"name": "1142", "dtype": "float32"}, {"name": "1143", "dtype": "float32"}, {"name": "1144", "dtype": "float32"}, {"name": "1145", "dtype": "float32"}, {"name": 
"1146", "dtype": "float32"}, {"name": "1147", "dtype": "float32"}, {"name": "1148", "dtype": "float32"}, {"name": "1149", "dtype": "float32"}, {"name": "1150", "dtype": "float32"}, {"name": "1151", "dtype": "float32"}, {"name": "1152", "dtype": "float32"}, {"name": "1153", "dtype": "float32"}, {"name": "1154", "dtype": "float32"}, {"name": "1155", "dtype": "float32"}, {"name": "1156", "dtype": "float32"}, {"name": "1157", "dtype": "float32"}, {"name": "1158", "dtype": "float32"}, {"name": "1159", "dtype": "float32"}, {"name": "1160", "dtype": "float32"}, {"name": "1161", "dtype": "float32"}, {"name": "1162", "dtype": "float32"}, {"name": "1163", "dtype": "float32"}, {"name": "1164", "dtype": "float32"}, {"name": "1165", "dtype": "float32"}, {"name": "1166", "dtype": "float32"}, {"name": "1167", "dtype": "float32"}, {"name": "1168", "dtype": "float32"}, {"name": "1169", "dtype": "float32"}, {"name": "1170", "dtype": "float32"}, {"name": "1171", "dtype": "float32"}, {"name": "1172", "dtype": "float32"}, {"name": "1173", "dtype": "float32"}, {"name": "1174", "dtype": "float32"}, {"name": "1175", "dtype": "float32"}, {"name": "1176", "dtype": "float32"}, {"name": "1177", "dtype": "float32"}, {"name": "1178", "dtype": "float32"}, {"name": "1179", "dtype": "float32"}, {"name": "1180", "dtype": "float32"}, {"name": "1181", "dtype": "float32"}, {"name": "1182", "dtype": "float32"}, {"name": "1183", "dtype": "float32"}, {"name": "1184", "dtype": "float32"}, {"name": "1185", "dtype": "float32"}, {"name": "1186", "dtype": "float32"}, {"name": "1187", "dtype": "float32"}, {"name": "1188", "dtype": "float32"}, {"name": "1189", "dtype": "float32"}, {"name": "1190", "dtype": "float32"}, {"name": "1191", "dtype": "float32"}, {"name": "1192", "dtype": "float32"}, {"name": "1193", "dtype": "float32"}, {"name": "1194", "dtype": "float32"}, {"name": "1195", "dtype": "float32"}, {"name": "1196", "dtype": "float32"}, {"name": "1197", "dtype": "float32"}, {"name": "1198", "dtype": "float32"}, {"name": "1199", "dtype": "float32"}, {"name": "1200", "dtype": "float32"}, {"name": "1201", "dtype": "float32"}, {"name": "1202", "dtype": "float32"}, {"name": "1203", "dtype": "float32"}, {"name": "1204", "dtype": "float32"}, {"name": "1205", "dtype": "float32"}, {"name": "1206", "dtype": "float32"}, {"name": "1207", "dtype": "float32"}, {"name": "1208", "dtype": "float32"}, {"name": "1209", "dtype": "float32"}, {"name": "1210", "dtype": "float32"}, {"name": "1211", "dtype": "float32"}, {"name": "1212", "dtype": "float32"}, {"name": "1213", "dtype": "float32"}, {"name": "1214", "dtype": "float32"}, {"name": "1215", "dtype": "float32"}, {"name": "1216", "dtype": "float32"}, {"name": "1217", "dtype": "float32"}, {"name": "1218", "dtype": "float32"}, {"name": "1219", "dtype": "float32"}, {"name": "1220", "dtype": "float32"}, {"name": "1221", "dtype": "float32"}, {"name": "1222", "dtype": "float32"}, {"name": "1223", "dtype": "float32"}, {"name": "1224", "dtype": "float32"}, {"name": "1225", "dtype": "float32"}, {"name": "1226", "dtype": "float32"}, {"name": "1227", "dtype": "float32"}, {"name": "1228", "dtype": "float32"}, {"name": "1229", "dtype": "float32"}, {"name": "1230", "dtype": "float32"}, {"name": "1231", "dtype": "float32"}, {"name": "1232", "dtype": "float32"}, {"name": "1233", "dtype": "float32"}, {"name": "1234", "dtype": "float32"}, {"name": "1235", "dtype": "float32"}, {"name": "1236", "dtype": "float32"}, {"name": "1237", "dtype": "float32"}, {"name": "1238", "dtype": "float32"}, {"name": "1239", "dtype": 
"float32"}, {"name": "1240", "dtype": "float32"}, {"name": "1241", "dtype": "float32"}, {"name": "1242", "dtype": "float32"}, {"name": "1243", "dtype": "float32"}, {"name": "1244", "dtype": "float32"}, {"name": "1245", "dtype": "float32"}, {"name": "1246", "dtype": "float32"}, {"name": "1247", "dtype": "float32"}, {"name": "1248", "dtype": "float32"}, {"name": "1249", "dtype": "float32"}, {"name": "1250", "dtype": "float32"}, {"name": "1251", "dtype": "float32"}, {"name": "1252", "dtype": "float32"}, {"name": "1253", "dtype": "float32"}, {"name": "1254", "dtype": "float32"}, {"name": "1255", "dtype": "float32"}, {"name": "1256", "dtype": "float32"}, {"name": "1257", "dtype": "float32"}, {"name": "1258", "dtype": "float32"}, {"name": "1259", "dtype": "float32"}, {"name": "1260", "dtype": "float32"}, {"name": "1261", "dtype": "float32"}, {"name": "1262", "dtype": "float32"}, {"name": "1263", "dtype": "float32"}, {"name": "1264", "dtype": "float32"}, {"name": "1265", "dtype": "float32"}, {"name": "1266", "dtype": "float32"}, {"name": "1267", "dtype": "float32"}, {"name": "1268", "dtype": "float32"}, {"name": "1269", "dtype": "float32"}, {"name": "1270", "dtype": "float32"}, {"name": "1271", "dtype": "float32"}, {"name": "1272", "dtype": "float32"}, {"name": "1273", "dtype": "float32"}, {"name": "1274", "dtype": "float32"}, {"name": "1275", "dtype": "float32"}, {"name": "1276", "dtype": "float32"}, {"name": "1277", "dtype": "float32"}, {"name": "1278", "dtype": "float32"}, {"name": "1279", "dtype": "float32"}, {"name": "1280", "dtype": "float32"}, {"name": "1281", "dtype": "float32"}, {"name": "1282", "dtype": "float32"}, {"name": "1283", "dtype": "float32"}, {"name": "1284", "dtype": "float32"}, {"name": "1285", "dtype": "float32"}, {"name": "1286", "dtype": "float32"}, {"name": "1287", "dtype": "float32"}, {"name": "1288", "dtype": "float32"}, {"name": "1289", "dtype": "float32"}, {"name": "1290", "dtype": "float32"}, {"name": "1291", "dtype": "float32"}, {"name": "1292", "dtype": "float32"}, {"name": "1293", "dtype": "float32"}, {"name": "1294", "dtype": "float32"}, {"name": "1295", "dtype": "float32"}, {"name": "1296", "dtype": "float32"}, {"name": "1297", "dtype": "float32"}, {"name": "1298", "dtype": "float32"}, {"name": "1299", "dtype": "float32"}, {"name": "1300", "dtype": "float32"}, {"name": "1301", "dtype": "float32"}, {"name": "1302", "dtype": "float32"}, {"name": "1303", "dtype": "float32"}, {"name": "1304", "dtype": "float32"}, {"name": "1305", "dtype": "float32"}, {"name": "1306", "dtype": "float32"}, {"name": "1307", "dtype": "float32"}, {"name": "1308", "dtype": "float32"}, {"name": "1309", "dtype": "float32"}, {"name": "1310", "dtype": "float32"}, {"name": "1311", "dtype": "float32"}, {"name": "1312", "dtype": "float32"}, {"name": "1313", "dtype": "float32"}, {"name": "1314", "dtype": "float32"}, {"name": "1315", "dtype": "float32"}, {"name": "1316", "dtype": "float32"}, {"name": "1317", "dtype": "float32"}, {"name": "1318", "dtype": "float32"}, {"name": "1319", "dtype": "float32"}, {"name": "1320", "dtype": "float32"}, {"name": "1321", "dtype": "float32"}, {"name": "1322", "dtype": "float32"}, {"name": "1323", "dtype": "float32"}, {"name": "1324", "dtype": "float32"}, {"name": "1325", "dtype": "float32"}, {"name": "1326", "dtype": "float32"}, {"name": "1327", "dtype": "float32"}, {"name": "1328", "dtype": "float32"}, {"name": "1329", "dtype": "float32"}, {"name": "1330", "dtype": "float32"}, {"name": "1331", "dtype": "float32"}, {"name": "1332", "dtype": "float32"}, {"name": 
"1333", "dtype": "float32"}, {"name": "1334", "dtype": "float32"}, {"name": "1335", "dtype": "float32"}, {"name": "1336", "dtype": "float32"}, {"name": "1337", "dtype": "float32"}, {"name": "1338", "dtype": "float32"}, {"name": "1339", "dtype": "float32"}, {"name": "1340", "dtype": "float32"}, {"name": "1341", "dtype": "float32"}, {"name": "1342", "dtype": "float32"}, {"name": "1343", "dtype": "float32"}, {"name": "1344", "dtype": "float32"}, {"name": "1345", "dtype": "float32"}, {"name": "1346", "dtype": "float32"}, {"name": "1347", "dtype": "float32"}, {"name": "1348", "dtype": "float32"}, {"name": "1349", "dtype": "float32"}, {"name": "1350", "dtype": "float32"}, {"name": "1351", "dtype": "float32"}, {"name": "1352", "dtype": "float32"}, {"name": "1353", "dtype": "float32"}, {"name": "1354", "dtype": "float32"}, {"name": "1355", "dtype": "float32"}, {"name": "1356", "dtype": "float32"}, {"name": "1357", "dtype": "float32"}, {"name": "1358", "dtype": "float32"}, {"name": "1359", "dtype": "float32"}, {"name": "1360", "dtype": "float32"}, {"name": "1361", "dtype": "float32"}, {"name": "1362", "dtype": "float32"}, {"name": "1363", "dtype": "float32"}, {"name": "1364", "dtype": "float32"}, {"name": "1365", "dtype": "float32"}, {"name": "1366", "dtype": "float32"}, {"name": "1367", "dtype": "float32"}, {"name": "1368", "dtype": "float32"}, {"name": "1369", "dtype": "float32"}, {"name": "1370", "dtype": "float32"}, {"name": "1371", "dtype": "float32"}, {"name": "1372", "dtype": "float32"}, {"name": "1373", "dtype": "float32"}, {"name": "1374", "dtype": "float32"}, {"name": "1375", "dtype": "float32"}, {"name": "1376", "dtype": "float32"}, {"name": "1377", "dtype": "float32"}, {"name": "1378", "dtype": "float32"}, {"name": "1379", "dtype": "float32"}, {"name": "1380", "dtype": "float32"}, {"name": "1381", "dtype": "float32"}, {"name": "1382", "dtype": "float32"}, {"name": "1383", "dtype": "float32"}, {"name": "1384", "dtype": "float32"}, {"name": "1385", "dtype": "float32"}, {"name": "1386", "dtype": "float32"}, {"name": "1387", "dtype": "float32"}, {"name": "1388", "dtype": "float32"}, {"name": "1389", "dtype": "float32"}, {"name": "1390", "dtype": "float32"}, {"name": "1391", "dtype": "float32"}, {"name": "1392", "dtype": "float32"}, {"name": "1393", "dtype": "float32"}, {"name": "1394", "dtype": "float32"}, {"name": "1395", "dtype": "float32"}, {"name": "1396", "dtype": "float32"}, {"name": "1397", "dtype": "float32"}, {"name": "1398", "dtype": "float32"}, {"name": "1399", "dtype": "float32"}, {"name": "1400", "dtype": "float32"}, {"name": "1401", "dtype": "float32"}, {"name": "1402", "dtype": "float32"}, {"name": "1403", "dtype": "float32"}, {"name": "1404", "dtype": "float32"}, {"name": "1405", "dtype": "float32"}, {"name": "1406", "dtype": "float32"}, {"name": "1407", "dtype": "float32"}, {"name": "1408", "dtype": "float32"}, {"name": "1409", "dtype": "float32"}, {"name": "1410", "dtype": "float32"}, {"name": "1411", "dtype": "float32"}, {"name": "1412", "dtype": "float32"}, {"name": "1413", "dtype": "float32"}, {"name": "1414", "dtype": "float32"}, {"name": "1415", "dtype": "float32"}, {"name": "1416", "dtype": "float32"}, {"name": "1417", "dtype": "float32"}, {"name": "1418", "dtype": "float32"}, {"name": "1419", "dtype": "float32"}, {"name": "1420", "dtype": "float32"}, {"name": "1421", "dtype": "float32"}, {"name": "1422", "dtype": "float32"}, {"name": "1423", "dtype": "float32"}, {"name": "1424", "dtype": "float32"}, {"name": "1425", "dtype": "float32"}, {"name": "1426", "dtype": 
"float32"}, {"name": "1427", "dtype": "float32"}, {"name": "1428", "dtype": "float32"}, {"name": "1429", "dtype": "float32"}, {"name": "1430", "dtype": "float32"}, {"name": "1431", "dtype": "float32"}, {"name": "1432", "dtype": "float32"}, {"name": "1433", "dtype": "float32"}, {"name": "1434", "dtype": "float32"}, {"name": "1435", "dtype": "float32"}, {"name": "1436", "dtype": "float32"}, {"name": "1437", "dtype": "float32"}, {"name": "1438", "dtype": "float32"}, {"name": "1439", "dtype": "float32"}, {"name": "1440", "dtype": "float32"}, {"name": "1441", "dtype": "float32"}, {"name": "1442", "dtype": "float32"}, {"name": "1443", "dtype": "float32"}, {"name": "1444", "dtype": "float32"}, {"name": "1445", "dtype": "float32"}, {"name": "1446", "dtype": "float32"}, {"name": "1447", "dtype": "float32"}, {"name": "1448", "dtype": "float32"}, {"name": "1449", "dtype": "float32"}, {"name": "1450", "dtype": "float32"}, {"name": "1451", "dtype": "float32"}, {"name": "1452", "dtype": "float32"}, {"name": "1453", "dtype": "float32"}, {"name": "1454", "dtype": "float32"}, {"name": "1455", "dtype": "float32"}, {"name": "1456", "dtype": "float32"}, {"name": "1457", "dtype": "float32"}, {"name": "1458", "dtype": "float32"}, {"name": "1459", "dtype": "float32"}, {"name": "1460", "dtype": "float32"}, {"name": "1461", "dtype": "float32"}, {"name": "1462", "dtype": "float32"}, {"name": "1463", "dtype": "float32"}, {"name": "1464", "dtype": "float32"}, {"name": "1465", "dtype": "float32"}, {"name": "1466", "dtype": "float32"}, {"name": "1467", "dtype": "float32"}, {"name": "1468", "dtype": "float32"}, {"name": "1469", "dtype": "float32"}, {"name": "1470", "dtype": "float32"}, {"name": "1471", "dtype": "float32"}, {"name": "1472", "dtype": "float32"}, {"name": "1473", "dtype": "float32"}, {"name": "1474", "dtype": "float32"}, {"name": "1475", "dtype": "float32"}, {"name": "1476", "dtype": "float32"}, {"name": "1477", "dtype": "float32"}, {"name": "1478", "dtype": "float32"}, {"name": "1479", "dtype": "float32"}, {"name": "1480", "dtype": "float32"}, {"name": "1481", "dtype": "float32"}, {"name": "1482", "dtype": "float32"}, {"name": "1483", "dtype": "float32"}, {"name": "1484", "dtype": "float32"}, {"name": "1485", "dtype": "float32"}, {"name": "1486", "dtype": "float32"}, {"name": "1487", "dtype": "float32"}, {"name": "1488", "dtype": "float32"}, {"name": "1489", "dtype": "float32"}, {"name": "1490", "dtype": "float32"}, {"name": "1491", "dtype": "float32"}, {"name": "1492", "dtype": "float32"}, {"name": "1493", "dtype": "float32"}, {"name": "1494", "dtype": "float32"}, {"name": "1495", "dtype": "float32"}, {"name": "1496", "dtype": "float32"}, {"name": "1497", "dtype": "float32"}, {"name": "1498", "dtype": "float32"}, {"name": "1499", "dtype": "float32"}, {"name": "1500", "dtype": "float32"}, {"name": "1501", "dtype": "float32"}, {"name": "1502", "dtype": "float32"}, {"name": "1503", "dtype": "float32"}, {"name": "1504", "dtype": "float32"}, {"name": "1505", "dtype": "float32"}, {"name": "1506", "dtype": "float32"}, {"name": "1507", "dtype": "float32"}, {"name": "1508", "dtype": "float32"}, {"name": "1509", "dtype": "float32"}, {"name": "1510", "dtype": "float32"}, {"name": "1511", "dtype": "float32"}, {"name": "1512", "dtype": "float32"}, {"name": "1513", "dtype": "float32"}, {"name": "1514", "dtype": "float32"}, {"name": "1515", "dtype": "float32"}, {"name": "1516", "dtype": "float32"}, {"name": "1517", "dtype": "float32"}, {"name": "1518", "dtype": "float32"}, {"name": "1519", "dtype": "float32"}, {"name": 
"1520", "dtype": "float32"}, {"name": "1521", "dtype": "float32"}, {"name": "1522", "dtype": "float32"}, {"name": "1523", "dtype": "float32"}, {"name": "1524", "dtype": "float32"}, {"name": "1525", "dtype": "float32"}, {"name": "1526", "dtype": "float32"}, {"name": "1527", "dtype": "float32"}, {"name": "1528", "dtype": "float32"}, {"name": "1529", "dtype": "float32"}, {"name": "1530", "dtype": "float32"}, {"name": "1531", "dtype": "float32"}, {"name": "1532", "dtype": "float32"}, {"name": "1533", "dtype": "float32"}, {"name": "1534", "dtype": "float32"}, {"name": "1535", "dtype": "float32"}, {"name": "1536", "dtype": "float32"}, {"name": "1537", "dtype": "float32"}, {"name": "1538", "dtype": "float32"}, {"name": "1539", "dtype": "float32"}, {"name": "1540", "dtype": "float32"}, {"name": "1541", "dtype": "float32"}, {"name": "1542", "dtype": "float32"}, {"name": "1543", "dtype": "float32"}, {"name": "1544", "dtype": "float32"}, {"name": "1545", "dtype": "float32"}, {"name": "1546", "dtype": "float32"}, {"name": "1547", "dtype": "float32"}, {"name": "1548", "dtype": "float32"}, {"name": "1549", "dtype": "float32"}, {"name": "1550", "dtype": "float32"}, {"name": "1551", "dtype": "float32"}, {"name": "1552", "dtype": "float32"}, {"name": "1553", "dtype": "float32"}, {"name": "1554", "dtype": "float32"}, {"name": "1555", "dtype": "float32"}, {"name": "1556", "dtype": "float32"}, {"name": "1557", "dtype": "float32"}, {"name": "1558", "dtype": "float32"}, {"name": "1559", "dtype": "float32"}, {"name": "1560", "dtype": "float32"}, {"name": "1561", "dtype": "float32"}, {"name": "1562", "dtype": "float32"}, {"name": "1563", "dtype": "float32"}, {"name": "1564", "dtype": "float32"}, {"name": "1565", "dtype": "float32"}, {"name": "1566", "dtype": "float32"}, {"name": "1567", "dtype": "float32"}, {"name": "1568", "dtype": "float32"}, {"name": "1569", "dtype": "float32"}, {"name": "1570", "dtype": "float32"}, {"name": "1571", "dtype": "float32"}, {"name": "1572", "dtype": "float32"}, {"name": "1573", "dtype": "float32"}, {"name": "1574", "dtype": "float32"}, {"name": "1575", "dtype": "float32"}, {"name": "1576", "dtype": "float32"}, {"name": "1577", "dtype": "float32"}, {"name": "1578", "dtype": "float32"}, {"name": "1579", "dtype": "float32"}, {"name": "1580", "dtype": "float32"}, {"name": "1581", "dtype": "float32"}, {"name": "1582", "dtype": "float32"}, {"name": "1583", "dtype": "float32"}, {"name": "1584", "dtype": "float32"}, {"name": "1585", "dtype": "float32"}, {"name": "1586", "dtype": "float32"}, {"name": "1587", "dtype": "float32"}, {"name": "1588", "dtype": "float32"}, {"name": "1589", "dtype": "float32"}, {"name": "1590", "dtype": "float32"}, {"name": "1591", "dtype": "float32"}, {"name": "1592", "dtype": "float32"}, {"name": "1593", "dtype": "float32"}, {"name": "1594", "dtype": "float32"}, {"name": "1595", "dtype": "float32"}, {"name": "1596", "dtype": "float32"}, {"name": "1597", "dtype": "float32"}, {"name": "1598", "dtype": "float32"}, {"name": "1599", "dtype": "float32"}, {"name": "1600", "dtype": "float32"}, {"name": "1601", "dtype": "float32"}, {"name": "1602", "dtype": "float32"}, {"name": "1603", "dtype": "float32"}, {"name": "1604", "dtype": "float32"}, {"name": "1605", "dtype": "float32"}, {"name": "1606", "dtype": "float32"}, {"name": "1607", "dtype": "float32"}, {"name": "1608", "dtype": "float32"}, {"name": "1609", "dtype": "float32"}, {"name": "1610", "dtype": "float32"}, {"name": "1611", "dtype": "float32"}, {"name": "1612", "dtype": "float32"}, {"name": "1613", "dtype": 
"float32"}, {"name": "1614", "dtype": "float32"}, {"name": "1615", "dtype": "float32"}, {"name": "1616", "dtype": "float32"}, {"name": "1617", "dtype": "float32"}, {"name": "1618", "dtype": "float32"}, {"name": "1619", "dtype": "float32"}, {"name": "1620", "dtype": "float32"}, {"name": "1621", "dtype": "float32"}, {"name": "1622", "dtype": "float32"}, {"name": "1623", "dtype": "float32"}, {"name": "1624", "dtype": "float32"}, {"name": "1625", "dtype": "float32"}, {"name": "1626", "dtype": "float32"}, {"name": "1627", "dtype": "float32"}, {"name": "1628", "dtype": "float32"}, {"name": "1629", "dtype": "float32"}, {"name": "1630", "dtype": "float32"}, {"name": "1631", "dtype": "float32"}, {"name": "1632", "dtype": "float32"}, {"name": "1633", "dtype": "float32"}, {"name": "1634", "dtype": "float32"}, {"name": "1635", "dtype": "float32"}, {"name": "1636", "dtype": "float32"}, {"name": "1637", "dtype": "float32"}, {"name": "1638", "dtype": "float32"}, {"name": "1639", "dtype": "float32"}, {"name": "1640", "dtype": "float32"}, {"name": "1641", "dtype": "float32"}, {"name": "1642", "dtype": "float32"}, {"name": "1643", "dtype": "float32"}, {"name": "1644", "dtype": "float32"}, {"name": "1645", "dtype": "float32"}, {"name": "1646", "dtype": "float32"}, {"name": "1647", "dtype": "float32"}, {"name": "1648", "dtype": "float32"}, {"name": "1649", "dtype": "float32"}, {"name": "1650", "dtype": "float32"}, {"name": "1651", "dtype": "float32"}, {"name": "1652", "dtype": "float32"}, {"name": "1653", "dtype": "float32"}, {"name": "1654", "dtype": "float32"}, {"name": "1655", "dtype": "float32"}, {"name": "1656", "dtype": "float32"}, {"name": "1657", "dtype": "float32"}, {"name": "1658", "dtype": "float32"}, {"name": "1659", "dtype": "float32"}, {"name": "1660", "dtype": "float32"}, {"name": "1661", "dtype": "float32"}, {"name": "1662", "dtype": "float32"}, {"name": "1663", "dtype": "float32"}, {"name": "1664", "dtype": "float32"}, {"name": "1665", "dtype": "float32"}, {"name": "1666", "dtype": "float32"}, {"name": "1667", "dtype": "float32"}, {"name": "1668", "dtype": "float32"}, {"name": "1669", "dtype": "float32"}, {"name": "1670", "dtype": "float32"}, {"name": "1671", "dtype": "float32"}, {"name": "1672", "dtype": "float32"}, {"name": "1673", "dtype": "float32"}, {"name": "1674", "dtype": "float32"}, {"name": "1675", "dtype": "float32"}, {"name": "1676", "dtype": "float32"}, {"name": "1677", "dtype": "float32"}, {"name": "1678", "dtype": "float32"}, {"name": "1679", "dtype": "float32"}, {"name": "1680", "dtype": "float32"}, {"name": "1681", "dtype": "float32"}, {"name": "1682", "dtype": "float32"}, {"name": "1683", "dtype": "float32"}, {"name": "1684", "dtype": "float32"}, {"name": "1685", "dtype": "float32"}, {"name": "1686", "dtype": "float32"}, {"name": "1687", "dtype": "float32"}, {"name": "1688", "dtype": "float32"}, {"name": "1689", "dtype": "float32"}, {"name": "1690", "dtype": "float32"}, {"name": "1691", "dtype": "float32"}, {"name": "1692", "dtype": "float32"}, {"name": "1693", "dtype": "float32"}, {"name": "1694", "dtype": "float32"}, {"name": "1695", "dtype": "float32"}, {"name": "1696", "dtype": "float32"}, {"name": "1697", "dtype": "float32"}, {"name": "1698", "dtype": "float32"}, {"name": "1699", "dtype": "float32"}, {"name": "1700", "dtype": "float32"}, {"name": "1701", "dtype": "float32"}, {"name": "1702", "dtype": "float32"}, {"name": "1703", "dtype": "float32"}, {"name": "1704", "dtype": "float32"}, {"name": "1705", "dtype": "float32"}, {"name": "1706", "dtype": "float32"}, {"name": 
"1707", "dtype": "float32"}, {"name": "1708", "dtype": "float32"}, {"name": "1709", "dtype": "float32"}, {"name": "1710", "dtype": "float32"}, {"name": "1711", "dtype": "float32"}, {"name": "1712", "dtype": "float32"}, {"name": "1713", "dtype": "float32"}, {"name": "1714", "dtype": "float32"}, {"name": "1715", "dtype": "float32"}, {"name": "1716", "dtype": "float32"}, {"name": "1717", "dtype": "float32"}, {"name": "1718", "dtype": "float32"}, {"name": "1719", "dtype": "float32"}, {"name": "1720", "dtype": "float32"}, {"name": "1721", "dtype": "float32"}, {"name": "1722", "dtype": "float32"}, {"name": "1723", "dtype": "float32"}, {"name": "1724", "dtype": "float32"}, {"name": "1725", "dtype": "float32"}, {"name": "1726", "dtype": "float32"}, {"name": "1727", "dtype": "float32"}, {"name": "1728", "dtype": "float32"}, {"name": "1729", "dtype": "float32"}, {"name": "1730", "dtype": "float32"}, {"name": "1731", "dtype": "float32"}, {"name": "1732", "dtype": "float32"}, {"name": "1733", "dtype": "float32"}, {"name": "1734", "dtype": "float32"}, {"name": "1735", "dtype": "float32"}, {"name": "1736", "dtype": "float32"}, {"name": "1737", "dtype": "float32"}, {"name": "1738", "dtype": "float32"}, {"name": "1739", "dtype": "float32"}, {"name": "1740", "dtype": "float32"}, {"name": "1741", "dtype": "float32"}, {"name": "1742", "dtype": "float32"}, {"name": "1743", "dtype": "float32"}, {"name": "1744", "dtype": "float32"}, {"name": "1745", "dtype": "float32"}, {"name": "1746", "dtype": "float32"}, {"name": "1747", "dtype": "float32"}, {"name": "1748", "dtype": "float32"}, {"name": "1749", "dtype": "float32"}, {"name": "1750", "dtype": "float32"}, {"name": "1751", "dtype": "float32"}, {"name": "1752", "dtype": "float32"}, {"name": "1753", "dtype": "float32"}, {"name": "1754", "dtype": "float32"}, {"name": "1755", "dtype": "float32"}, {"name": "1756", "dtype": "float32"}, {"name": "1757", "dtype": "float32"}, {"name": "1758", "dtype": "float32"}, {"name": "1759", "dtype": "float32"}, {"name": "1760", "dtype": "float32"}, {"name": "1761", "dtype": "float32"}, {"name": "1762", "dtype": "float32"}, {"name": "1763", "dtype": "float32"}, {"name": "1764", "dtype": "float32"}, {"name": "1765", "dtype": "float32"}, {"name": "1766", "dtype": "float32"}, {"name": "1767", "dtype": "float32"}, {"name": "1768", "dtype": "float32"}, {"name": "1769", "dtype": "float32"}, {"name": "1770", "dtype": "float32"}, {"name": "1771", "dtype": "float32"}, {"name": "1772", "dtype": "float32"}, {"name": "1773", "dtype": "float32"}, {"name": "1774", "dtype": "float32"}, {"name": "1775", "dtype": "float32"}, {"name": "1776", "dtype": "float32"}, {"name": "1777", "dtype": "float32"}, {"name": "1778", "dtype": "float32"}, {"name": "1779", "dtype": "float32"}, {"name": "1780", "dtype": "float32"}, {"name": "1781", "dtype": "float32"}, {"name": "1782", "dtype": "float32"}, {"name": "1783", "dtype": "float32"}, {"name": "1784", "dtype": "float32"}, {"name": "1785", "dtype": "float32"}, {"name": "1786", "dtype": "float32"}, {"name": "1787", "dtype": "float32"}, {"name": "1788", "dtype": "float32"}, {"name": "1789", "dtype": "float32"}, {"name": "1790", "dtype": "float32"}, {"name": "1791", "dtype": "float32"}, {"name": "1792", "dtype": "float32"}, {"name": "1793", "dtype": "float32"}, {"name": "1794", "dtype": "float32"}, {"name": "1795", "dtype": "float32"}, {"name": "1796", "dtype": "float32"}, {"name": "1797", "dtype": "float32"}, {"name": "1798", "dtype": "float32"}, {"name": "1799", "dtype": "float32"}, {"name": "1800", "dtype": 
"float32"}, {"name": "1801", "dtype": "float32"}, {"name": "1802", "dtype": "float32"}, {"name": "1803", "dtype": "float32"}, {"name": "1804", "dtype": "float32"}, {"name": "1805", "dtype": "float32"}, {"name": "1806", "dtype": "float32"}, {"name": "1807", "dtype": "float32"}, {"name": "1808", "dtype": "float32"}, {"name": "1809", "dtype": "float32"}, {"name": "1810", "dtype": "float32"}, {"name": "1811", "dtype": "float32"}, {"name": "1812", "dtype": "float32"}, {"name": "1813", "dtype": "float32"}, {"name": "1814", "dtype": "float32"}, {"name": "1815", "dtype": "float32"}, {"name": "1816", "dtype": "float32"}, {"name": "1817", "dtype": "float32"}, {"name": "1818", "dtype": "float32"}, {"name": "1819", "dtype": "float32"}, {"name": "1820", "dtype": "float32"}, {"name": "1821", "dtype": "float32"}, {"name": "1822", "dtype": "float32"}, {"name": "1823", "dtype": "float32"}, {"name": "1824", "dtype": "float32"}, {"name": "1825", "dtype": "float32"}, {"name": "1826", "dtype": "float32"}, {"name": "1827", "dtype": "float32"}, {"name": "1828", "dtype": "float32"}, {"name": "1829", "dtype": "float32"}, {"name": "1830", "dtype": "float32"}, {"name": "1831", "dtype": "float32"}, {"name": "1832", "dtype": "float32"}, {"name": "1833", "dtype": "float32"}, {"name": "1834", "dtype": "float32"}, {"name": "1835", "dtype": "float32"}, {"name": "1836", "dtype": "float32"}, {"name": "1837", "dtype": "float32"}, {"name": "1838", "dtype": "float32"}, {"name": "1839", "dtype": "float32"}, {"name": "1840", "dtype": "float32"}, {"name": "1841", "dtype": "float32"}, {"name": "1842", "dtype": "float32"}, {"name": "1843", "dtype": "float32"}, {"name": "1844", "dtype": "float32"}, {"name": "1845", "dtype": "float32"}, {"name": "1846", "dtype": "float32"}, {"name": "1847", "dtype": "float32"}, {"name": "1848", "dtype": "float32"}, {"name": "1849", "dtype": "float32"}, {"name": "1850", "dtype": "float32"}, {"name": "1851", "dtype": "float32"}, {"name": "1852", "dtype": "float32"}, {"name": "1853", "dtype": "float32"}, {"name": "1854", "dtype": "float32"}, {"name": "1855", "dtype": "float32"}, {"name": "1856", "dtype": "float32"}, {"name": "1857", "dtype": "float32"}, {"name": "1858", "dtype": "float32"}, {"name": "1859", "dtype": "float32"}, {"name": "1860", "dtype": "float32"}, {"name": "1861", "dtype": "float32"}, {"name": "1862", "dtype": "float32"}, {"name": "1863", "dtype": "float32"}, {"name": "1864", "dtype": "float32"}, {"name": "1865", "dtype": "float32"}, {"name": "1866", "dtype": "float32"}, {"name": "1867", "dtype": "float32"}, {"name": "1868", "dtype": "float32"}, {"name": "1869", "dtype": "float32"}, {"name": "1870", "dtype": "float32"}, {"name": "1871", "dtype": "float32"}, {"name": "1872", "dtype": "float32"}, {"name": "1873", "dtype": "float32"}, {"name": "1874", "dtype": "float32"}, {"name": "1875", "dtype": "float32"}, {"name": "1876", "dtype": "float32"}, {"name": "1877", "dtype": "float32"}, {"name": "1878", "dtype": "float32"}, {"name": "1879", "dtype": "float32"}, {"name": "1880", "dtype": "float32"}, {"name": "1881", "dtype": "float32"}, {"name": "1882", "dtype": "float32"}, {"name": "1883", "dtype": "float32"}, {"name": "1884", "dtype": "float32"}, {"name": "1885", "dtype": "float32"}, {"name": "1886", "dtype": "float32"}, {"name": "1887", "dtype": "float32"}, {"name": "1888", "dtype": "float32"}, {"name": "1889", "dtype": "float32"}, {"name": "1890", "dtype": "float32"}, {"name": "1891", "dtype": "float32"}, {"name": "1892", "dtype": "float32"}, {"name": "1893", "dtype": "float32"}, {"name": 
"1894", "dtype": "float32"}, {"name": "1895", "dtype": "float32"}, {"name": "1896", "dtype": "float32"}, {"name": "1897", "dtype": "float32"}, {"name": "1898", "dtype": "float32"}, {"name": "1899", "dtype": "float32"}, {"name": "1900", "dtype": "float32"}, {"name": "1901", "dtype": "float32"}, {"name": "1902", "dtype": "float32"}, {"name": "1903", "dtype": "float32"}, {"name": "1904", "dtype": "float32"}, {"name": "1905", "dtype": "float32"}, {"name": "1906", "dtype": "float32"}, {"name": "1907", "dtype": "float32"}, {"name": "1908", "dtype": "float32"}, {"name": "1909", "dtype": "float32"}, {"name": "1910", "dtype": "float32"}, {"name": "1911", "dtype": "float32"}, {"name": "1912", "dtype": "float32"}, {"name": "1913", "dtype": "float32"}, {"name": "1914", "dtype": "float32"}, {"name": "1915", "dtype": "float32"}, {"name": "1916", "dtype": "float32"}, {"name": "1917", "dtype": "float32"}, {"name": "1918", "dtype": "float32"}, {"name": "1919", "dtype": "float32"}, {"name": "1920", "dtype": "float32"}, {"name": "1921", "dtype": "float32"}, {"name": "1922", "dtype": "float32"}, {"name": "1923", "dtype": "float32"}, {"name": "1924", "dtype": "float32"}, {"name": "1925", "dtype": "float32"}, {"name": "1926", "dtype": "float32"}, {"name": "1927", "dtype": "float32"}, {"name": "1928", "dtype": "float32"}, {"name": "1929", "dtype": "float32"}, {"name": "1930", "dtype": "float32"}, {"name": "1931", "dtype": "float32"}, {"name": "1932", "dtype": "float32"}, {"name": "1933", "dtype": "float32"}, {"name": "1934", "dtype": "float32"}, {"name": "1935", "dtype": "float32"}, {"name": "1936", "dtype": "float32"}, {"name": "1937", "dtype": "float32"}, {"name": "1938", "dtype": "float32"}, {"name": "1939", "dtype": "float32"}, {"name": "1940", "dtype": "float32"}, {"name": "1941", "dtype": "float32"}, {"name": "1942", "dtype": "float32"}, {"name": "1943", "dtype": "float32"}, {"name": "1944", "dtype": "float32"}, {"name": "1945", "dtype": "float32"}, {"name": "1946", "dtype": "float32"}, {"name": "1947", "dtype": "float32"}, {"name": "1948", "dtype": "float32"}, {"name": "1949", "dtype": "float32"}, {"name": "1950", "dtype": "float32"}, {"name": "1951", "dtype": "float32"}, {"name": "1952", "dtype": "float32"}, {"name": "1953", "dtype": "float32"}, {"name": "1954", "dtype": "float32"}, {"name": "1955", "dtype": "float32"}, {"name": "1956", "dtype": "float32"}, {"name": "1957", "dtype": "float32"}, {"name": "1958", "dtype": "float32"}, {"name": "1959", "dtype": "float32"}, {"name": "1960", "dtype": "float32"}, {"name": "1961", "dtype": "float32"}, {"name": "1962", "dtype": "float32"}, {"name": "1963", "dtype": "float32"}, {"name": "1964", "dtype": "float32"}, {"name": "1965", "dtype": "float32"}, {"name": "1966", "dtype": "float32"}, {"name": "1967", "dtype": "float32"}, {"name": "1968", "dtype": "float32"}, {"name": "1969", "dtype": "float32"}, {"name": "1970", "dtype": "float32"}, {"name": "1971", "dtype": "float32"}, {"name": "1972", "dtype": "float32"}, {"name": "1973", "dtype": "float32"}, {"name": "1974", "dtype": "float32"}, {"name": "1975", "dtype": "float32"}, {"name": "1976", "dtype": "float32"}, {"name": "1977", "dtype": "float32"}, {"name": "1978", "dtype": "float32"}, {"name": "1979", "dtype": "float32"}, {"name": "1980", "dtype": "float32"}, {"name": "1981", "dtype": "float32"}, {"name": "1982", "dtype": "float32"}, {"name": "1983", "dtype": "float32"}, {"name": "1984", "dtype": "float32"}, {"name": "1985", "dtype": "float32"}, {"name": "1986", "dtype": "float32"}, {"name": "1987", "dtype": 
"float32"}, {"name": "1988", "dtype": "float32"}, {"name": "1989", "dtype": "float32"}, {"name": "1990", "dtype": "float32"}, {"name": "1991", "dtype": "float32"}, {"name": "1992", "dtype": "float32"}, {"name": "1993", "dtype": "float32"}, {"name": "1994", "dtype": "float32"}, {"name": "1995", "dtype": "float32"}, {"name": "1996", "dtype": "float32"}, {"name": "1997", "dtype": "float32"}, {"name": "1998", "dtype": "float32"}, {"name": "1999", "dtype": "float32"}, {"name": "2000", "dtype": "float32"}, {"name": "2001", "dtype": "float32"}, {"name": "2002", "dtype": "float32"}, {"name": "2003", "dtype": "float32"}, {"name": "2004", "dtype": "float32"}, {"name": "2005", "dtype": "float32"}, {"name": "2006", "dtype": "float32"}, {"name": "2007", "dtype": "float32"}, {"name": "2008", "dtype": "float32"}, {"name": "2009", "dtype": "float32"}, {"name": "2010", "dtype": "float32"}, {"name": "2011", "dtype": "float32"}, {"name": "2012", "dtype": "float32"}, {"name": "2013", "dtype": "float32"}, {"name": "2014", "dtype": "float32"}, {"name": "2015", "dtype": "float32"}, {"name": "2016", "dtype": "float32"}, {"name": "2017", "dtype": "float32"}, {"name": "2018", "dtype": "float32"}, {"name": "2019", "dtype": "float32"}, {"name": "2020", "dtype": "float32"}, {"name": "2021", "dtype": "float32"}, {"name": "2022", "dtype": "float32"}, {"name": "2023", "dtype": "float32"}, {"name": "2024", "dtype": "float32"}, {"name": "2025", "dtype": "float32"}, {"name": "2026", "dtype": "float32"}, {"name": "2027", "dtype": "float32"}, {"name": "2028", "dtype": "float32"}, {"name": "2029", "dtype": "float32"}, {"name": "2030", "dtype": "float32"}, {"name": "2031", "dtype": "float32"}, {"name": "2032", "dtype": "float32"}, {"name": "2033", "dtype": "float32"}, {"name": "2034", "dtype": "float32"}, {"name": "2035", "dtype": "float32"}, {"name": "2036", "dtype": "float32"}, {"name": "2037", "dtype": "float32"}, {"name": "2038", "dtype": "float32"}, {"name": "2039", "dtype": "float32"}, {"name": "2040", "dtype": "float32"}, {"name": "2041", "dtype": "float32"}, {"name": "2042", "dtype": "float32"}, {"name": "2043", "dtype": "float32"}, {"name": "2044", "dtype": "float32"}, {"name": "2045", "dtype": "float32"}, {"name": "2046", "dtype": "float32"}, {"name": "2047", "dtype": "float32"}, {"name": "label", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 307650065.625, "num_examples": 37500}, {"name": "test", "num_bytes": 102550020.0, "num_examples": 12500}], "download_size": 565195315, "dataset_size": 410200085.625}}
2023-08-23T04:55:17+00:00
[]
[]
TAGS #region-us
# Dataset Card for "Spirit_GPTNEO_Finetuned" More Information needed
[ "# Dataset Card for \"Spirit_GPTNEO_Finetuned\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"Spirit_GPTNEO_Finetuned\"\n\nMore Information needed" ]
[ 6, 22 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"Spirit_GPTNEO_Finetuned\"\n\nMore Information needed" ]
997bc69c3a491435fbfab1e094cffa737bd81265
# Dataset Card for Evaluation run of acrastt/Griffin-3B

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/acrastt/Griffin-3B
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [acrastt/Griffin-3B](https://huggingface.co/acrastt/Griffin-3B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can, for instance, do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_acrastt__Griffin-3B",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-17T18:06:21.492354](https://huggingface.co/datasets/open-llm-leaderboard/details_acrastt__Griffin-3B/blob/main/results_2023-10-17T18-06-21.492354.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0012583892617449664,
        "em_stderr": 0.000363056089311903,
        "f1": 0.05389366610738276,
        "f1_stderr": 0.0012796475133266298,
        "acc": 0.33997138536824134,
        "acc_stderr": 0.007967754559779602
    },
    "harness|drop|3": {
        "em": 0.0012583892617449664,
        "em_stderr": 0.000363056089311903,
        "f1": 0.05389366610738276,
        "f1_stderr": 0.0012796475133266298
    },
    "harness|gsm8k|5": {
        "acc": 0.009855951478392721,
        "acc_stderr": 0.0027210765770416642
    },
    "harness|winogrande|5": {
        "acc": 0.67008681925809,
        "acc_stderr": 0.013214432542517538
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
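The card above explains that each of the 64 task configurations carries one split per run timestamp plus a "latest" split (both visible in the configs metadata below). As a small illustrative sketch that is not part of the original card: enumerating the configurations and pulling the newest results for a single task. `get_dataset_config_names` is a standard `datasets` helper, and the configuration chosen here ("harness_gsm8k_5") is one of those listed in the metadata below.

```python
# Sketch: list the evaluation configurations of this details repository and
# load the most recent run of one task via its "latest" split.
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_acrastt__Griffin-3B"

configs = get_dataset_config_names(repo)   # one configuration per evaluated task
print(len(configs), configs[:5])

# Splits inside a configuration are named after run timestamps; "latest"
# always points at the most recent run (see the configs metadata below).
gsm8k_details = load_dataset(repo, "harness_gsm8k_5", split="latest")
print(gsm8k_details)
```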
open-llm-leaderboard/details_acrastt__Griffin-3B
[ "region:us" ]
2023-08-18T17:45:32+00:00
{"pretty_name": "Evaluation run of acrastt/Griffin-3B", "dataset_summary": "Dataset automatically created during the evaluation run of model [acrastt/Griffin-3B](https://huggingface.co/acrastt/Griffin-3B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_acrastt__Griffin-3B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-17T18:06:21.492354](https://huggingface.co/datasets/open-llm-leaderboard/details_acrastt__Griffin-3B/blob/main/results_2023-10-17T18-06-21.492354.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0012583892617449664,\n \"em_stderr\": 0.000363056089311903,\n \"f1\": 0.05389366610738276,\n \"f1_stderr\": 0.0012796475133266298,\n \"acc\": 0.33997138536824134,\n \"acc_stderr\": 0.007967754559779602\n },\n \"harness|drop|3\": {\n \"em\": 0.0012583892617449664,\n \"em_stderr\": 0.000363056089311903,\n \"f1\": 0.05389366610738276,\n \"f1_stderr\": 0.0012796475133266298\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.009855951478392721,\n \"acc_stderr\": 0.0027210765770416642\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.67008681925809,\n \"acc_stderr\": 0.013214432542517538\n }\n}\n```", "repo_url": "https://huggingface.co/acrastt/Griffin-3B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|arc:challenge|25_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|arc:challenge|25_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_17T18_06_21.492354", "path": ["**/details_harness|drop|3_2023-10-17T18-06-21.492354.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-17T18-06-21.492354.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_17T18_06_21.492354", "path": ["**/details_harness|gsm8k|5_2023-10-17T18-06-21.492354.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-17T18-06-21.492354.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hellaswag|10_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": 
["**/details_harness|hellaswag|10_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T03:59:18.128878.parquet", 
"**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T03:59:18.128878.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T04:28:39.575079.parquet", 
"**/details_harness|hendrycksTest-global_facts|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T04:28:39.575079.parquet", 
"**/details_harness|hendrycksTest-virology|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T04:28:39.575079.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T04:28:39.575079.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": 
["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": 
"2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": 
["**/details_harness|hendrycksTest-marketing|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": 
"2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": 
"2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T04:28:39.575079.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T04:28:39.575079.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_17T18_06_21.492354", "path": ["**/details_harness|winogrande|5_2023-10-17T18-06-21.492354.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-17T18-06-21.492354.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T03_59_18.128878", "path": ["results_2023-08-18T03:59:18.128878.parquet"]}, {"split": "2023_08_18T04_28_39.575079", "path": ["results_2023-08-18T04:28:39.575079.parquet"]}, {"split": "2023_10_17T18_06_21.492354", "path": ["results_2023-10-17T18-06-21.492354.parquet"]}, {"split": "latest", "path": ["results_2023-10-17T18-06-21.492354.parquet"]}]}]}
2023-10-17T17:06:34+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of acrastt/Griffin-3B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model acrastt/Griffin-3B on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the sketch below): ## Latest results These are the latest results from run 2023-10-17T18:06:21.492354 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
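The card above says "do the following" but the accompanying snippet was lost when the text was flattened (the Puma-3B record below preserves the original pattern). The following is a minimal sketch rather than the card's own code: the repository id `open-llm-leaderboard/details_acrastt__Griffin-3B` is an assumption based on the `details_<org>__<model>` naming convention visible in the Puma-3B record, while the config name and the "latest" split are taken from the metadata above.

```python
from datasets import load_dataset

# Assumed repository id (details_<org>__<model> convention); the config
# name "harness_winogrande_5" and the "latest" split are listed in the
# metadata of this record.
data = load_dataset(
    "open-llm-leaderboard/details_acrastt__Griffin-3B",
    "harness_winogrande_5",
    split="latest",
)
```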
[ "# Dataset Card for Evaluation run of acrastt/Griffin-3B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model acrastt/Griffin-3B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-17T18:06:21.492354(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of acrastt/Griffin-3B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model acrastt/Griffin-3B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-17T18:06:21.492354(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 17, 31, 165, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of acrastt/Griffin-3B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model acrastt/Griffin-3B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-17T18:06:21.492354(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
20d3eb44b86bb207adb8b458bd40b51f60c3514f
# Dataset Card for Evaluation run of acrastt/Puma-3B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/acrastt/Puma-3B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [acrastt/Puma-3B](https://huggingface.co/acrastt/Puma-3B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_acrastt__Puma-3B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-18T02:31:20.356957](https://huggingface.co/datasets/open-llm-leaderboard/details_acrastt__Puma-3B/blob/main/results_2023-10-18T02-31-20.356957.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0008389261744966443, "em_stderr": 0.0002964962989801232, "f1": 0.05384123322147677, "f1_stderr": 0.0012835343678864997, "acc": 0.335677096267113, "acc_stderr": 0.00783328405303534 }, "harness|drop|3": { "em": 0.0008389261744966443, "em_stderr": 0.0002964962989801232, "f1": 0.05384123322147677, "f1_stderr": 0.0012835343678864997 }, "harness|gsm8k|5": { "acc": 0.0075815011372251705, "acc_stderr": 0.0023892815120772322 }, "harness|winogrande|5": { "acc": 0.6637726913970008, "acc_stderr": 0.013277286593993447 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
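The snippet in the card loads one per-task configuration; the aggregated numbers shown under "Latest results" live in the "results" configuration the card describes. The following is a minimal sketch of pulling them, assuming the "results" config of this repository exposes a "latest" split in the same way the per-task configs in the metadata below do:

```python
from datasets import load_dataset

# "results" stores the aggregated metrics of every run; the "latest"
# split is assumed to point at the newest run, mirroring the per-task
# configurations.
results = load_dataset(
    "open-llm-leaderboard/details_acrastt__Puma-3B",
    "results",
    split="latest",
)
print(results[0])  # aggregated metrics of the most recent evaluation
```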
open-llm-leaderboard/details_acrastt__Puma-3B
[ "region:us" ]
2023-08-18T17:45:42+00:00
{"pretty_name": "Evaluation run of acrastt/Puma-3B", "dataset_summary": "Dataset automatically created during the evaluation run of model [acrastt/Puma-3B](https://huggingface.co/acrastt/Puma-3B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_acrastt__Puma-3B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-18T02:31:20.356957](https://huggingface.co/datasets/open-llm-leaderboard/details_acrastt__Puma-3B/blob/main/results_2023-10-18T02-31-20.356957.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0008389261744966443,\n \"em_stderr\": 0.0002964962989801232,\n \"f1\": 0.05384123322147677,\n \"f1_stderr\": 0.0012835343678864997,\n \"acc\": 0.335677096267113,\n \"acc_stderr\": 0.00783328405303534\n },\n \"harness|drop|3\": {\n \"em\": 0.0008389261744966443,\n \"em_stderr\": 0.0002964962989801232,\n \"f1\": 0.05384123322147677,\n \"f1_stderr\": 0.0012835343678864997\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0075815011372251705,\n \"acc_stderr\": 0.0023892815120772322\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6637726913970008,\n \"acc_stderr\": 0.013277286593993447\n }\n}\n```", "repo_url": "https://huggingface.co/acrastt/Puma-3B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|arc:challenge|25_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|arc:challenge|25_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|arc:challenge|25_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|arc:challenge|25_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_18T02_31_20.356957", "path": ["**/details_harness|drop|3_2023-10-18T02-31-20.356957.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-18T02-31-20.356957.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_18T02_31_20.356957", "path": ["**/details_harness|gsm8k|5_2023-10-18T02-31-20.356957.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-18T02-31-20.356957.parquet"]}]}, {"config_name": 
"harness_hellaswag_10", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hellaswag|10_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hellaswag|10_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hellaswag|10_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hellaswag|10_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T19:20:55.722583.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T19:20:55.722583.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T23:25:19.325666.parquet", 
"**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T23:25:19.325666.parquet", 
"**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T23:25:19.325666.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T23:52:04.586597.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T23:52:04.586597.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T01:05:27.057546.parquet", 
"**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T01:05:27.057546.parquet", 
"**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T01:05:27.057546.parquet", 
"**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T01:05:27.057546.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": 
"2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": 
["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T23:25:19.325666.parquet"]}, 
{"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", 
"data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": 
["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": 
["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": 
["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T01:05:27.057546.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": 
["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", 
"path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": 
["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T01:05:27.057546.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T01:05:27.057546.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_18T02_31_20.356957", "path": ["**/details_harness|winogrande|5_2023-10-18T02-31-20.356957.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-18T02-31-20.356957.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T19_20_55.722583", "path": ["results_2023-08-17T19:20:55.722583.parquet"]}, {"split": "2023_08_17T23_25_19.325666", "path": ["results_2023-08-17T23:25:19.325666.parquet"]}, {"split": "2023_08_17T23_52_04.586597", "path": ["results_2023-08-17T23:52:04.586597.parquet"]}, {"split": "2023_08_18T01_05_27.057546", "path": ["results_2023-08-18T01:05:27.057546.parquet"]}, {"split": "2023_10_18T02_31_20.356957", "path": ["results_2023-10-18T02-31-20.356957.parquet"]}, {"split": "latest", "path": ["results_2023-10-18T02-31-20.356957.parquet"]}]}]}
2023-10-18T01:31:33+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of acrastt/Puma-3B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model acrastt/Puma-3B on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the loading sketch after this card): ## Latest results These are the latest results from run 2023-10-18T02:31:20.356957 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
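A minimal loading sketch for this record; the code example was stripped from this flattened text field, so this is a reconstruction assuming the repository id follows the leaderboard convention (`open-llm-leaderboard/details_acrastt__Puma-3B`) and using the `harness_winogrande_5` configuration and `latest` split that appear in the metadata above:

```python
from datasets import load_dataset

# Load one of the 64 evaluation configurations of this details repository.
# The "latest" split always points at the most recent evaluation run.
data = load_dataset(
    "open-llm-leaderboard/details_acrastt__Puma-3B",  # assumed repo id (leaderboard convention)
    "harness_winogrande_5",
    split="latest",
)
```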
[ "# Dataset Card for Evaluation run of acrastt/Puma-3B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model acrastt/Puma-3B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T02:31:20.356957(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of acrastt/Puma-3B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model acrastt/Puma-3B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T02:31:20.356957(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 17, 31, 165, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of acrastt/Puma-3B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model acrastt/Puma-3B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 5 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-18T02:31:20.356957(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
47294c6e198e0f3085d802e3aa3af20bd3dddb12
# Dataset Card for Evaluation run of CalderaAI/30B-Epsilon

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/CalderaAI/30B-Epsilon
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [CalderaAI/30B-Epsilon](https://huggingface.co/CalderaAI/30B-Epsilon) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). A sketch for loading this configuration is given after this card.

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_CalderaAI__30B-Epsilon",
	"harness_gsm8k_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-12-02T15:01:08.880467](https://huggingface.co/datasets/open-llm-leaderboard/details_CalderaAI__30B-Epsilon/blob/main/results_2023-12-02T15-01-08.880467.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "acc": 0.24564063684609552,
        "acc_stderr": 0.011857183603902225
    },
    "harness|gsm8k|5": {
        "acc": 0.24564063684609552,
        "acc_stderr": 0.011857183603902225
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
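A small usage sketch (not part of the original card): the metadata below defines a `results` configuration whose `latest` split points at the newest run, so the aggregated numbers can be pulled directly; the per-task configurations such as `harness_gsm8k_5` work the same way.

```python
from datasets import load_dataset

# Aggregated metrics for the newest run live in the "results" configuration;
# its "latest" split mirrors results_2023-12-02T15-01-08.880467.parquet.
results = load_dataset(
    "open-llm-leaderboard/details_CalderaAI__30B-Epsilon",
    "results",
    split="latest",
)
print(results[0])  # one row of aggregated scores (e.g. gsm8k acc / acc_stderr)
```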
open-llm-leaderboard/details_CalderaAI__30B-Epsilon
[ "region:us" ]
2023-08-18T17:46:16+00:00
{"pretty_name": "Evaluation run of CalderaAI/30B-Epsilon", "dataset_summary": "Dataset automatically created during the evaluation run of model [CalderaAI/30B-Epsilon](https://huggingface.co/CalderaAI/30B-Epsilon) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_CalderaAI__30B-Epsilon\",\n\t\"harness_gsm8k_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-12-02T15:01:08.880467](https://huggingface.co/datasets/open-llm-leaderboard/details_CalderaAI__30B-Epsilon/blob/main/results_2023-12-02T15-01-08.880467.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.24564063684609552,\n \"acc_stderr\": 0.011857183603902225\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.24564063684609552,\n \"acc_stderr\": 0.011857183603902225\n }\n}\n```", "repo_url": "https://huggingface.co/CalderaAI/30B-Epsilon", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|arc:challenge|25_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_09T14_24_13.994751", "path": ["**/details_harness|drop|3_2023-09-09T14-24-13.994751.parquet"]}, {"split": "2023_09_23T06_45_40.292570", "path": ["**/details_harness|drop|3_2023-09-23T06-45-40.292570.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-23T06-45-40.292570.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_09T14_24_13.994751", "path": ["**/details_harness|gsm8k|5_2023-09-09T14-24-13.994751.parquet"]}, {"split": "2023_09_23T06_45_40.292570", "path": ["**/details_harness|gsm8k|5_2023-09-23T06-45-40.292570.parquet"]}, {"split": "2023_12_02T15_01_08.880467", "path": ["**/details_harness|gsm8k|5_2023-12-02T15-01-08.880467.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-12-02T15-01-08.880467.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hellaswag|10_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T19:47:15.382915.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T19:47:15.382915.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T19:47:15.382915.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T19:47:15.382915.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T19:47:15.382915.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T19:47:15.382915.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_09T14_24_13.994751", "path": ["**/details_harness|winogrande|5_2023-09-09T14-24-13.994751.parquet"]}, {"split": "2023_09_23T06_45_40.292570", "path": ["**/details_harness|winogrande|5_2023-09-23T06-45-40.292570.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-23T06-45-40.292570.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T19_47_15.382915", "path": ["results_2023-08-17T19:47:15.382915.parquet"]}, {"split": "2023_09_09T14_24_13.994751", "path": ["results_2023-09-09T14-24-13.994751.parquet"]}, {"split": "2023_09_23T06_45_40.292570", "path": ["results_2023-09-23T06-45-40.292570.parquet"]}, {"split": "2023_12_02T15_01_08.880467", "path": ["results_2023-12-02T15-01-08.880467.parquet"]}, {"split": "latest", "path": ["results_2023-12-02T15-01-08.880467.parquet"]}]}]}
2023-12-02T15:01:16+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of CalderaAI/30B-Epsilon ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model CalderaAI/30B-Epsilon on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-12-02T15:01:08.880467 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of CalderaAI/30B-Epsilon", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model CalderaAI/30B-Epsilon on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-02T15:01:08.880467(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of CalderaAI/30B-Epsilon", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model CalderaAI/30B-Epsilon on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-12-02T15:01:08.880467(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 18, 31, 167, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of CalderaAI/30B-Epsilon## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model CalderaAI/30B-Epsilon on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-12-02T15:01:08.880467(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
992a74de9b9bd6a995a40dd0793a8adbad6388a2
# Dataset Card for Evaluation run of quantumaikr/llama-2-70b-fb16-orca-chat-10k

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/quantumaikr/llama-2-70b-fb16-orca-chat-10k
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [quantumaikr/llama-2-70b-fb16-orca-chat-10k](https://huggingface.co/quantumaikr/llama-2-70b-fb16-orca-chat-10k) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run (see the loading sketch after this card). The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_quantumaikr__llama-2-70b-fb16-orca-chat-10k",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-18T08:24:33.430081](https://huggingface.co/datasets/open-llm-leaderboard/details_quantumaikr__llama-2-70b-fb16-orca-chat-10k/blob/main/results_2023-10-18T08-24-33.430081.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0028313758389261743,
        "em_stderr": 0.0005441551135494018,
        "f1": 0.0711283557046983,
        "f1_stderr": 0.001478786284269493,
        "acc": 0.5552504139308139,
        "acc_stderr": 0.011242265850160478
    },
    "harness|drop|3": {
        "em": 0.0028313758389261743,
        "em_stderr": 0.0005441551135494018,
        "f1": 0.0711283557046983,
        "f1_stderr": 0.001478786284269493
    },
    "harness|gsm8k|5": {
        "acc": 0.26914329037149354,
        "acc_stderr": 0.012216595457292733
    },
    "harness|winogrande|5": {
        "acc": 0.8413575374901342,
        "acc_stderr": 0.010267936243028223
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
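A companion sketch, assuming the same conventions as the card's own example: individual runs are addressed by their timestamp-named split, while `latest` tracks the newest one (the split names appear in the config metadata below).

```python
from datasets import load_dataset

# Per-example details for the DROP eval; "latest" mirrors the
# timestamp-named split of the newest run (2023_10_18T08_24_33.430081
# in the metadata below).
drop_details = load_dataset(
    "open-llm-leaderboard/details_quantumaikr__llama-2-70b-fb16-orca-chat-10k",
    "harness_drop_3",
    split="latest",
)
print(len(drop_details))  # number of evaluated DROP examples
```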
open-llm-leaderboard/details_quantumaikr__llama-2-70b-fb16-orca-chat-10k
[ "region:us" ]
2023-08-18T17:46:27+00:00
{"pretty_name": "Evaluation run of quantumaikr/llama-2-70b-fb16-orca-chat-10k", "dataset_summary": "Dataset automatically created during the evaluation run of model [quantumaikr/llama-2-70b-fb16-orca-chat-10k](https://huggingface.co/quantumaikr/llama-2-70b-fb16-orca-chat-10k) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_quantumaikr__llama-2-70b-fb16-orca-chat-10k\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-18T08:24:33.430081](https://huggingface.co/datasets/open-llm-leaderboard/details_quantumaikr__llama-2-70b-fb16-orca-chat-10k/blob/main/results_2023-10-18T08-24-33.430081.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0028313758389261743,\n \"em_stderr\": 0.0005441551135494018,\n \"f1\": 0.0711283557046983,\n \"f1_stderr\": 0.001478786284269493,\n \"acc\": 0.5552504139308139,\n \"acc_stderr\": 0.011242265850160478\n },\n \"harness|drop|3\": {\n \"em\": 0.0028313758389261743,\n \"em_stderr\": 0.0005441551135494018,\n \"f1\": 0.0711283557046983,\n \"f1_stderr\": 0.001478786284269493\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.26914329037149354,\n \"acc_stderr\": 0.012216595457292733\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8413575374901342,\n \"acc_stderr\": 0.010267936243028223\n }\n}\n```", "repo_url": "https://huggingface.co/quantumaikr/llama-2-70b-fb16-orca-chat-10k", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|arc:challenge|25_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_18T08_24_33.430081", "path": ["**/details_harness|drop|3_2023-10-18T08-24-33.430081.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-18T08-24-33.430081.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_18T08_24_33.430081", "path": ["**/details_harness|gsm8k|5_2023-10-18T08-24-33.430081.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-18T08-24-33.430081.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hellaswag|10_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T21:37:12.844888.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T21:37:12.844888.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T21:37:12.844888.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T21:37:12.844888.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T21:37:12.844888.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T21:37:12.844888.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_18T08_24_33.430081", "path": ["**/details_harness|winogrande|5_2023-10-18T08-24-33.430081.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-18T08-24-33.430081.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T21_37_12.844888", "path": ["results_2023-08-17T21:37:12.844888.parquet"]}, {"split": "2023_10_18T08_24_33.430081", "path": ["results_2023-10-18T08-24-33.430081.parquet"]}, {"split": "latest", "path": ["results_2023-10-18T08-24-33.430081.parquet"]}]}]}
2023-10-18T07:24:46+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of quantumaikr/llama-2-70b-fb16-orca-chat-10k ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model quantumaikr/llama-2-70b-fb16-orca-chat-10k on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the snippet after this card): ## Latest results These are the latest results from run 2023-10-18T08:24:33.430081 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each of them in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
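The flattened text above refers to a loading snippet that this text field drops; the example below mirrors the one recorded in this record's dataset_summary metadata, with the repo and configuration names taken verbatim from there:

```python
from datasets import load_dataset

# Per-sample details for one evaluated task; the "train" split always
# points to the latest results of this configuration.
data = load_dataset(
    "open-llm-leaderboard/details_quantumaikr__llama-2-70b-fb16-orca-chat-10k",
    "harness_winogrande_5",
    split="train",
)
```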
[ "# Dataset Card for Evaluation run of quantumaikr/llama-2-70b-fb16-orca-chat-10k", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model quantumaikr/llama-2-70b-fb16-orca-chat-10k on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T08:24:33.430081(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of quantumaikr/llama-2-70b-fb16-orca-chat-10k", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model quantumaikr/llama-2-70b-fb16-orca-chat-10k on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T08:24:33.430081(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 29, 31, 177, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of quantumaikr/llama-2-70b-fb16-orca-chat-10k## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model quantumaikr/llama-2-70b-fb16-orca-chat-10k on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-18T08:24:33.430081(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
43d25a42e304f16c348cf412e3c3150d922a4af4
# Dataset Card for Evaluation run of sia-ai/llama-2-7b-1-percent-open-orca-1000-steps-v0

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/sia-ai/llama-2-7b-1-percent-open-orca-1000-steps-v0
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [sia-ai/llama-2-7b-1-percent-open-orca-1000-steps-v0](https://huggingface.co/sia-ai/llama-2-7b-1-percent-open-orca-1000-steps-v0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_sia-ai__llama-2-7b-1-percent-open-orca-1000-steps-v0",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-23T07:22:00.656974](https://huggingface.co/datasets/open-llm-leaderboard/details_sia-ai__llama-2-7b-1-percent-open-orca-1000-steps-v0/blob/main/results_2023-10-23T07-22-00.656974.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each of them in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.08672399328859061,
        "em_stderr": 0.0028821083852410504,
        "f1": 0.13875838926174472,
        "f1_stderr": 0.0030398596200052856,
        "acc": 0.384207080894677,
        "acc_stderr": 0.00839930559560157
    },
    "harness|drop|3": {
        "em": 0.08672399328859061,
        "em_stderr": 0.0028821083852410504,
        "f1": 0.13875838926174472,
        "f1_stderr": 0.0030398596200052856
    },
    "harness|gsm8k|5": {
        "acc": 0.027293404094010616,
        "acc_stderr": 0.004488095380209766
    },
    "harness|winogrande|5": {
        "acc": 0.7411207576953434,
        "acc_stderr": 0.012310515810993376
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
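Beyond the snippet in the card above, two further access patterns follow from the summary: each run lives in a split named after its timestamp (mirrored by "latest"), and an aggregated "results" configuration collects the run-level metrics. A minimal sketch, assuming the "results" configuration follows the same layout as in the quantumaikr record above (the eval-task configuration and split names come from this record's configs metadata below):

```python
from datasets import load_dataset

repo = "open-llm-leaderboard/details_sia-ai__llama-2-7b-1-percent-open-orca-1000-steps-v0"

# Task-level details: "latest" mirrors the newest timestamped run
# (e.g. "2023_10_23T07_22_00.656974" in the configs metadata below).
gsm8k = load_dataset(repo, "harness_gsm8k_5", split="latest")

# Aggregated metrics of each run; the "results" configuration is assumed
# to exist here, matching the pattern of the other leaderboard datasets.
results = load_dataset(repo, "results", split="latest")
```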
open-llm-leaderboard/details_sia-ai__llama-2-7b-1-percent-open-orca-1000-steps-v0
[ "region:us" ]
2023-08-18T17:46:42+00:00
{"pretty_name": "Evaluation run of sia-ai/llama-2-7b-1-percent-open-orca-1000-steps-v0", "dataset_summary": "Dataset automatically created during the evaluation run of model [sia-ai/llama-2-7b-1-percent-open-orca-1000-steps-v0](https://huggingface.co/sia-ai/llama-2-7b-1-percent-open-orca-1000-steps-v0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_sia-ai__llama-2-7b-1-percent-open-orca-1000-steps-v0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-23T07:22:00.656974](https://huggingface.co/datasets/open-llm-leaderboard/details_sia-ai__llama-2-7b-1-percent-open-orca-1000-steps-v0/blob/main/results_2023-10-23T07-22-00.656974.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.08672399328859061,\n \"em_stderr\": 0.0028821083852410504,\n \"f1\": 0.13875838926174472,\n \"f1_stderr\": 0.0030398596200052856,\n \"acc\": 0.384207080894677,\n \"acc_stderr\": 0.00839930559560157\n },\n \"harness|drop|3\": {\n \"em\": 0.08672399328859061,\n \"em_stderr\": 0.0028821083852410504,\n \"f1\": 0.13875838926174472,\n \"f1_stderr\": 0.0030398596200052856\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.027293404094010616,\n \"acc_stderr\": 0.004488095380209766\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7411207576953434,\n \"acc_stderr\": 0.012310515810993376\n }\n}\n```", "repo_url": "https://huggingface.co/sia-ai/llama-2-7b-1-percent-open-orca-1000-steps-v0", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|arc:challenge|25_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_23T07_22_00.656974", "path": ["**/details_harness|drop|3_2023-10-23T07-22-00.656974.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-23T07-22-00.656974.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_23T07_22_00.656974", "path": ["**/details_harness|gsm8k|5_2023-10-23T07-22-00.656974.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-23T07-22-00.656974.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hellaswag|10_2023-08-18T13:55:57.511446.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T13:55:57.511446.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T13:55:57.511446.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T13:55:57.511446.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T13:55:57.511446.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T13:55:57.511446.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T13:55:57.511446.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_23T07_22_00.656974", "path": ["**/details_harness|winogrande|5_2023-10-23T07-22-00.656974.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-23T07-22-00.656974.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T13_55_57.511446", "path": ["results_2023-08-18T13:55:57.511446.parquet"]}, {"split": "2023_10_23T07_22_00.656974", "path": ["results_2023-10-23T07-22-00.656974.parquet"]}, {"split": "latest", "path": ["results_2023-10-23T07-22-00.656974.parquet"]}]}]}
2023-10-23T06:22:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of sia-ai/llama-2-7b-1-percent-open-orca-1000-steps-v0 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model sia-ai/llama-2-7b-1-percent-open-orca-1000-steps-v0 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the sketch after this card text): ## Latest results These are the latest results from run 2023-10-23T07:22:00.656974 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
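The fenced loading snippet referenced above was dropped when this card text was flattened; the sketch below reconstructs it. The repository id is an assumption inferred from the leaderboard's `details_<org>__<model>` naming convention (it is not spelled out in this record), while the `harness_winogrande_5` configuration and its splits do appear in this record's metadata.

```python
from datasets import load_dataset

# Minimal sketch of the stripped snippet. The repo id below is an
# assumption following the leaderboard's details_<org>__<model> convention.
data = load_dataset(
    "open-llm-leaderboard/details_sia-ai__llama-2-7b-1-percent-open-orca-1000-steps-v0",
    "harness_winogrande_5",  # one of the 64 per-task configurations
    split="train",           # per the card, "train" points at the latest results;
                             # the metadata also defines a "latest" split
)
```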
[ "# Dataset Card for Evaluation run of sia-ai/llama-2-7b-1-percent-open-orca-1000-steps-v0", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model sia-ai/llama-2-7b-1-percent-open-orca-1000-steps-v0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T07:22:00.656974(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of sia-ai/llama-2-7b-1-percent-open-orca-1000-steps-v0", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model sia-ai/llama-2-7b-1-percent-open-orca-1000-steps-v0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T07:22:00.656974(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 35, 31, 183, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of sia-ai/llama-2-7b-1-percent-open-orca-1000-steps-v0## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model sia-ai/llama-2-7b-1-percent-open-orca-1000-steps-v0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-23T07:22:00.656974(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
186674bcdf94e30b5ebf8c2b2e0aa571740166e7
# Dataset Card for Evaluation run of circulus/Llama-2-13b-orca-v1 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/circulus/Llama-2-13b-orca-v1 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [circulus/Llama-2-13b-orca-v1](https://huggingface.co/circulus/Llama-2-13b-orca-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_circulus__Llama-2-13b-orca-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-17T12:50:57.881579](https://huggingface.co/datasets/open-llm-leaderboard/details_circulus__Llama-2-13b-orca-v1/blob/main/results_2023-09-17T12-50-57.881579.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.1529991610738255, "em_stderr": 0.0036866006582882706, "f1": 0.2246581375838923, "f1_stderr": 0.003770616290655452, "acc": 0.44842066021890015, "acc_stderr": 0.010546865226614108 }, "harness|drop|3": { "em": 0.1529991610738255, "em_stderr": 0.0036866006582882706, "f1": 0.2246581375838923, "f1_stderr": 0.003770616290655452 }, "harness|gsm8k|5": { "acc": 0.1288855193328279, "acc_stderr": 0.009229580761400274 }, "harness|winogrande|5": { "acc": 0.7679558011049724, "acc_stderr": 0.011864149691827943 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
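The aggregated numbers quoted under "Latest results" live in the `results` configuration listed in this record's metadata; below is a minimal sketch of retrieving them with the same `datasets` API. The exact column layout of the results parquet is not documented in this card, so the sketch only loads and prints the record for inspection.

```python
from datasets import load_dataset

# Load the aggregated "results" configuration; per this record's metadata,
# the "latest" split points at results_2023-09-17T12-50-57.881579.parquet.
results = load_dataset(
    "open-llm-leaderboard/details_circulus__Llama-2-13b-orca-v1",
    "results",
    split="latest",
)
print(results[0])  # inspect the aggregated metrics row (layout not documented here)
```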
open-llm-leaderboard/details_circulus__Llama-2-13b-orca-v1
[ "region:us" ]
2023-08-18T17:46:51+00:00
{"pretty_name": "Evaluation run of circulus/Llama-2-13b-orca-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [circulus/Llama-2-13b-orca-v1](https://huggingface.co/circulus/Llama-2-13b-orca-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_circulus__Llama-2-13b-orca-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-17T12:50:57.881579](https://huggingface.co/datasets/open-llm-leaderboard/details_circulus__Llama-2-13b-orca-v1/blob/main/results_2023-09-17T12-50-57.881579.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.1529991610738255,\n \"em_stderr\": 0.0036866006582882706,\n \"f1\": 0.2246581375838923,\n \"f1_stderr\": 0.003770616290655452,\n \"acc\": 0.44842066021890015,\n \"acc_stderr\": 0.010546865226614108\n },\n \"harness|drop|3\": {\n \"em\": 0.1529991610738255,\n \"em_stderr\": 0.0036866006582882706,\n \"f1\": 0.2246581375838923,\n \"f1_stderr\": 0.003770616290655452\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.1288855193328279,\n \"acc_stderr\": 0.009229580761400274\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7679558011049724,\n \"acc_stderr\": 0.011864149691827943\n }\n}\n```", "repo_url": "https://huggingface.co/circulus/Llama-2-13b-orca-v1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|arc:challenge|25_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_17T07_53_18.900339", "path": ["**/details_harness|drop|3_2023-09-17T07-53-18.900339.parquet"]}, {"split": "2023_09_17T12_50_57.881579", "path": ["**/details_harness|drop|3_2023-09-17T12-50-57.881579.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-17T12-50-57.881579.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_17T07_53_18.900339", "path": ["**/details_harness|gsm8k|5_2023-09-17T07-53-18.900339.parquet"]}, {"split": "2023_09_17T12_50_57.881579", "path": ["**/details_harness|gsm8k|5_2023-09-17T12-50-57.881579.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-17T12-50-57.881579.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_18T08_46_04.009114", 
"path": ["**/details_harness|hellaswag|10_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T08:46:04.009114.parquet", 
"**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T08:46:04.009114.parquet", 
"**/details_harness|hendrycksTest-global_facts|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T08:46:04.009114.parquet", 
"**/details_harness|hendrycksTest-virology|5_2023-08-18T08:46:04.009114.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T08:46:04.009114.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T08:46:04.009114.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T08:46:04.009114.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_17T07_53_18.900339", "path": ["**/details_harness|winogrande|5_2023-09-17T07-53-18.900339.parquet"]}, {"split": "2023_09_17T12_50_57.881579", "path": ["**/details_harness|winogrande|5_2023-09-17T12-50-57.881579.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-17T12-50-57.881579.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T08_46_04.009114", "path": ["results_2023-08-18T08:46:04.009114.parquet"]}, {"split": "2023_09_17T07_53_18.900339", "path": ["results_2023-09-17T07-53-18.900339.parquet"]}, {"split": "2023_09_17T12_50_57.881579", "path": ["results_2023-09-17T12-50-57.881579.parquet"]}, {"split": "latest", "path": ["results_2023-09-17T12-50-57.881579.parquet"]}]}]}
2023-09-17T11:51:10+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of circulus/Llama-2-13b-orca-v1 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model circulus/Llama-2-13b-orca-v1 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration, "results", stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-17T12:50:57.881579 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
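The summary above says "you can for instance do the following:", but this text-only rendering dropped the loading snippet. A minimal sketch of that load is below; note the repository id is inferred from the leaderboard's `details_<org>__<model>` naming convention rather than quoted from this record, so treat it as an assumption.

```python
# Minimal sketch: loading one evaluation configuration for this run.
# ASSUMPTION: the repo id follows the leaderboard's details_<org>__<model>
# naming convention; it is not spelled out in this flattened record.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_circulus__Llama-2-13b-orca-v1",
    "harness_winogrande_5",  # one of the 64 per-task configurations
    split="train",           # per the card text, "train" tracks the latest
)                            # results; the config metadata also lists a
print(data)                  # "latest" split alias
```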
[ "# Dataset Card for Evaluation run of circulus/Llama-2-13b-orca-v1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model circulus/Llama-2-13b-orca-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T12:50:57.881579(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of circulus/Llama-2-13b-orca-v1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model circulus/Llama-2-13b-orca-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T12:50:57.881579(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 23, 31, 171, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of circulus/Llama-2-13b-orca-v1## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model circulus/Llama-2-13b-orca-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-17T12:50:57.881579(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
2d15fc4a5094bdaea3ea0d72f02b72aada2f4400
# Dataset Card for Evaluation run of garage-bAInd/Camel-Platypus2-70B

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/garage-bAInd/Camel-Platypus2-70B
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [garage-bAInd/Camel-Platypus2-70B](https://huggingface.co/garage-bAInd/Camel-Platypus2-70B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results (a sketch of pinning a specific run versus the "latest" alias follows this card).

An additional configuration, "results", stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_garage-bAInd__Camel-Platypus2-70B",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-16T06:37:05.018958](https://huggingface.co/datasets/open-llm-leaderboard/details_garage-bAInd__Camel-Platypus2-70B/blob/main/results_2023-10-16T06-37-05.018958.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.5069211409395973,
        "em_stderr": 0.0051199774044148345,
        "f1": 0.559724203020135,
        "f1_stderr": 0.004829732229468497,
        "acc": 0.5345469918434537,
        "acc_stderr": 0.01116294273345166
    },
    "harness|drop|3": {
        "em": 0.5069211409395973,
        "em_stderr": 0.0051199774044148345,
        "f1": 0.559724203020135,
        "f1_stderr": 0.004829732229468497
    },
    "harness|gsm8k|5": {
        "acc": 0.2395754359363154,
        "acc_stderr": 0.01175686434407741
    },
    "harness|winogrande|5": {
        "acc": 0.829518547750592,
        "acc_stderr": 0.010569021122825909
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
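Because each configuration carries one split per timestamped run plus a "latest" alias, it can be useful to pin a specific run when comparing results over time. A small sketch under that assumption, using config and split names taken from this card's own metadata; the comparison itself is illustrative, not an official API.

```python
# Sketch: pinning a specific timestamped run versus the "latest" alias.
# The config name and split names below are quoted from this card's
# metadata; everything else is an illustrative pattern.
from datasets import load_dataset

repo = "open-llm-leaderboard/details_garage-bAInd__Camel-Platypus2-70B"

latest = load_dataset(repo, "harness_winogrande_5", split="latest")
pinned = load_dataset(repo, "harness_winogrande_5",
                      split="2023_09_23T09_15_03.498663")
print(len(latest), len(pinned))  # row counts for the two runs
```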
open-llm-leaderboard/details_garage-bAInd__Camel-Platypus2-70B
[ "region:us" ]
2023-08-18T17:47:01+00:00
{"pretty_name": "Evaluation run of garage-bAInd/Camel-Platypus2-70B", "dataset_summary": "Dataset automatically created during the evaluation run of model [garage-bAInd/Camel-Platypus2-70B](https://huggingface.co/garage-bAInd/Camel-Platypus2-70B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_garage-bAInd__Camel-Platypus2-70B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-16T06:37:05.018958](https://huggingface.co/datasets/open-llm-leaderboard/details_garage-bAInd__Camel-Platypus2-70B/blob/main/results_2023-10-16T06-37-05.018958.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.5069211409395973,\n \"em_stderr\": 0.0051199774044148345,\n \"f1\": 0.559724203020135,\n \"f1_stderr\": 0.004829732229468497,\n \"acc\": 0.5345469918434537,\n \"acc_stderr\": 0.01116294273345166\n },\n \"harness|drop|3\": {\n \"em\": 0.5069211409395973,\n \"em_stderr\": 0.0051199774044148345,\n \"f1\": 0.559724203020135,\n \"f1_stderr\": 0.004829732229468497\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.2395754359363154,\n \"acc_stderr\": 0.01175686434407741\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.829518547750592,\n \"acc_stderr\": 0.010569021122825909\n }\n}\n```", "repo_url": "https://huggingface.co/garage-bAInd/Camel-Platypus2-70B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|arc:challenge|25_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_23T09_15_03.498663", "path": ["**/details_harness|drop|3_2023-09-23T09-15-03.498663.parquet"]}, {"split": "2023_10_16T06_37_05.018958", "path": ["**/details_harness|drop|3_2023-10-16T06-37-05.018958.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-16T06-37-05.018958.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_23T09_15_03.498663", "path": ["**/details_harness|gsm8k|5_2023-09-23T09-15-03.498663.parquet"]}, {"split": "2023_10_16T06_37_05.018958", "path": ["**/details_harness|gsm8k|5_2023-10-16T06-37-05.018958.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-16T06-37-05.018958.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": 
"2023_08_18T00_04_49.359575", "path": ["**/details_harness|hellaswag|10_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T00:04:49.359575.parquet", 
"**/details_harness|hendrycksTest-international_law|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T00:04:49.359575.parquet", 
"**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T00:04:49.359575.parquet", 
"**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T00:04:49.359575.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": 
[{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T00:04:49.359575.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T00:04:49.359575.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_23T09_15_03.498663", "path": ["**/details_harness|winogrande|5_2023-09-23T09-15-03.498663.parquet"]}, {"split": "2023_10_16T06_37_05.018958", "path": ["**/details_harness|winogrande|5_2023-10-16T06-37-05.018958.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-16T06-37-05.018958.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T00_04_49.359575", "path": ["results_2023-08-18T00:04:49.359575.parquet"]}, {"split": "2023_09_23T09_15_03.498663", "path": ["results_2023-09-23T09-15-03.498663.parquet"]}, {"split": "2023_10_16T06_37_05.018958", "path": ["results_2023-10-16T06-37-05.018958.parquet"]}, {"split": "latest", "path": ["results_2023-10-16T06-37-05.018958.parquet"]}]}]}
2023-10-16T05:37:18+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of garage-bAInd/Camel-Platypus2-70B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model garage-bAInd/Camel-Platypus2-70B on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration, "results", stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-16T06:37:05.018958 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of garage-bAInd/Camel-Platypus2-70B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model garage-bAInd/Camel-Platypus2-70B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-16T06:37:05.018958(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of garage-bAInd/Camel-Platypus2-70B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model garage-bAInd/Camel-Platypus2-70B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-16T06:37:05.018958(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 24, 31, 172, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of garage-bAInd/Camel-Platypus2-70B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model garage-bAInd/Camel-Platypus2-70B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-16T06:37:05.018958(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
0330f78cc0c9d7dd0f18dfd7fc26e6bf70d06866
# Dataset Card for Evaluation run of Fredithefish/ReasonixPajama-3B-HF

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/Fredithefish/ReasonixPajama-3B-HF
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [Fredithefish/ReasonixPajama-3B-HF](https://huggingface.co/Fredithefish/ReasonixPajama-3B-HF) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration, "results", stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Fredithefish__ReasonixPajama-3B-HF",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-17T20:47:42.602044](https://huggingface.co/datasets/open-llm-leaderboard/details_Fredithefish__ReasonixPajama-3B-HF/blob/main/results_2023-10-17T20-47-42.602044.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval — a sketch of reading these aggregated rows back follows this card):

```python
{
    "all": {
        "em": 0.005557885906040268,
        "em_stderr": 0.0007613497667018498,
        "f1": 0.08515520134228192,
        "f1_stderr": 0.001865179611495464,
        "acc": 0.3211223493917147,
        "acc_stderr": 0.007758248793713638
    },
    "harness|drop|3": {
        "em": 0.005557885906040268,
        "em_stderr": 0.0007613497667018498,
        "f1": 0.08515520134228192,
        "f1_stderr": 0.001865179611495464
    },
    "harness|gsm8k|5": {
        "acc": 0.00530705079605762,
        "acc_stderr": 0.002001305720948056
    },
    "harness|winogrande|5": {
        "acc": 0.6369376479873717,
        "acc_stderr": 0.01351519186647922
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
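The aggregated numbers shown above live in the "results" configuration as parquet files, one split per run. A minimal sketch of reading them back, assuming only the "results"/"latest" config and split names used by the sibling records in this dump; the column layout is not documented here, so the sketch inspects the schema before reading any particular metric.

```python
# Sketch: reading the aggregated metrics back from the "results" config.
# ASSUMPTION: the "results" configuration and its "latest" split follow
# the pattern of the sibling records; the column layout is unknown, so
# inspect it first rather than hard-coding field names.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_Fredithefish__ReasonixPajama-3B-HF",
    "results",
    split="latest",
)
print(results.column_names)  # discover the schema first
print(results[0])            # the most recent aggregated result row
```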
open-llm-leaderboard/details_Fredithefish__ReasonixPajama-3B-HF
[ "region:us" ]
2023-08-18T17:47:11+00:00
{"pretty_name": "Evaluation run of Fredithefish/ReasonixPajama-3B-HF", "dataset_summary": "Dataset automatically created during the evaluation run of model [Fredithefish/ReasonixPajama-3B-HF](https://huggingface.co/Fredithefish/ReasonixPajama-3B-HF) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Fredithefish__ReasonixPajama-3B-HF\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-17T20:47:42.602044](https://huggingface.co/datasets/open-llm-leaderboard/details_Fredithefish__ReasonixPajama-3B-HF/blob/main/results_2023-10-17T20-47-42.602044.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.005557885906040268,\n \"em_stderr\": 0.0007613497667018498,\n \"f1\": 0.08515520134228192,\n \"f1_stderr\": 0.001865179611495464,\n \"acc\": 0.3211223493917147,\n \"acc_stderr\": 0.007758248793713638\n },\n \"harness|drop|3\": {\n \"em\": 0.005557885906040268,\n \"em_stderr\": 0.0007613497667018498,\n \"f1\": 0.08515520134228192,\n \"f1_stderr\": 0.001865179611495464\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.00530705079605762,\n \"acc_stderr\": 0.002001305720948056\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6369376479873717,\n \"acc_stderr\": 0.01351519186647922\n }\n}\n```", "repo_url": "https://huggingface.co/Fredithefish/ReasonixPajama-3B-HF", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|arc:challenge|25_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_17T20_47_42.602044", "path": ["**/details_harness|drop|3_2023-10-17T20-47-42.602044.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-17T20-47-42.602044.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_17T20_47_42.602044", "path": ["**/details_harness|gsm8k|5_2023-10-17T20-47-42.602044.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-17T20-47-42.602044.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hellaswag|10_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T15:18:48.992858.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T15:18:48.992858.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T15:18:48.992858.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T15:18:48.992858.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T15:18:48.992858.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_17T20_47_42.602044", "path": ["**/details_harness|winogrande|5_2023-10-17T20-47-42.602044.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-17T20-47-42.602044.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T15_18_48.992858", "path": ["results_2023-08-17T15:18:48.992858.parquet"]}, {"split": "2023_10_17T20_47_42.602044", "path": ["results_2023-10-17T20-47-42.602044.parquet"]}, {"split": "latest", "path": ["results_2023-10-17T20-47-42.602044.parquet"]}]}]}
2023-10-17T19:47:55+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Fredithefish/ReasonixPajama-3B-HF ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model Fredithefish/ReasonixPajama-3B-HF on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-17T20:47:42.602044 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of Fredithefish/ReasonixPajama-3B-HF", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Fredithefish/ReasonixPajama-3B-HF on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-17T20:47:42.602044(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Fredithefish/ReasonixPajama-3B-HF", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Fredithefish/ReasonixPajama-3B-HF on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-17T20:47:42.602044(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 23, 31, 171, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Fredithefish/ReasonixPajama-3B-HF## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Fredithefish/ReasonixPajama-3B-HF on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-17T20:47:42.602044(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
e78be966e0c3142479da3162b7c64ff952bced21
# Dataset Card for Evaluation run of Panchovix/airoboros-33b-gpt4-1.2-SuperHOT-8k ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Panchovix/airoboros-33b-gpt4-1.2-SuperHOT-8k - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [Panchovix/airoboros-33b-gpt4-1.2-SuperHOT-8k](https://huggingface.co/Panchovix/airoboros-33b-gpt4-1.2-SuperHOT-8k) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Panchovix__airoboros-33b-gpt4-1.2-SuperHOT-8k", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-17T16:57:27.002060](https://huggingface.co/datasets/open-llm-leaderboard/details_Panchovix__airoboros-33b-gpt4-1.2-SuperHOT-8k/blob/main/results_2023-09-17T16-57-27.002060.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval): ```python { "all": { "em": 0.0026216442953020135, "em_stderr": 0.0005236685642965811, "f1": 0.005930159395973156, "f1_stderr": 0.0006950327104148204, "acc": 0.2521704814522494, "acc_stderr": 0.007025978032038446 }, "harness|drop|3": { "em": 0.0026216442953020135, "em_stderr": 0.0005236685642965811, "f1": 0.005930159395973156, "f1_stderr": 0.0006950327104148204 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 }, "harness|winogrande|5": { "acc": 0.5043409629044988, "acc_stderr": 0.014051956064076892 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_Panchovix__airoboros-33b-gpt4-1.2-SuperHOT-8k
[ "region:us" ]
2023-08-18T17:47:20+00:00
{"pretty_name": "Evaluation run of Panchovix/airoboros-33b-gpt4-1.2-SuperHOT-8k", "dataset_summary": "Dataset automatically created during the evaluation run of model [Panchovix/airoboros-33b-gpt4-1.2-SuperHOT-8k](https://huggingface.co/Panchovix/airoboros-33b-gpt4-1.2-SuperHOT-8k) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Panchovix__airoboros-33b-gpt4-1.2-SuperHOT-8k\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-17T16:57:27.002060](https://huggingface.co/datasets/open-llm-leaderboard/details_Panchovix__airoboros-33b-gpt4-1.2-SuperHOT-8k/blob/main/results_2023-09-17T16-57-27.002060.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0026216442953020135,\n \"em_stderr\": 0.0005236685642965811,\n \"f1\": 0.005930159395973156,\n \"f1_stderr\": 0.0006950327104148204,\n \"acc\": 0.2521704814522494,\n \"acc_stderr\": 0.007025978032038446\n },\n \"harness|drop|3\": {\n \"em\": 0.0026216442953020135,\n \"em_stderr\": 0.0005236685642965811,\n \"f1\": 0.005930159395973156,\n \"f1_stderr\": 0.0006950327104148204\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5043409629044988,\n \"acc_stderr\": 0.014051956064076892\n }\n}\n```", "repo_url": "https://huggingface.co/Panchovix/airoboros-33b-gpt4-1.2-SuperHOT-8k", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|arc:challenge|25_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_17T16_57_27.002060", "path": ["**/details_harness|drop|3_2023-09-17T16-57-27.002060.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-17T16-57-27.002060.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_17T16_57_27.002060", "path": ["**/details_harness|gsm8k|5_2023-09-17T16-57-27.002060.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-17T16-57-27.002060.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hellaswag|10_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T20:41:42.341199.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T20:41:42.341199.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T20:41:42.341199.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T20:41:42.341199.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T20:41:42.341199.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T20:41:42.341199.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_17T16_57_27.002060", "path": ["**/details_harness|winogrande|5_2023-09-17T16-57-27.002060.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-17T16-57-27.002060.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T20_41_42.341199", "path": ["results_2023-08-17T20:41:42.341199.parquet"]}, {"split": "2023_09_17T16_57_27.002060", "path": ["results_2023-09-17T16-57-27.002060.parquet"]}, {"split": "latest", "path": ["results_2023-09-17T16-57-27.002060.parquet"]}]}]}
2023-09-17T15:57:39+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Panchovix/airoboros-33b-gpt4-1.2-SuperHOT-8k ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model Panchovix/airoboros-33b-gpt4-1.2-SuperHOT-8k on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-17T16:57:27.002060 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
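The loading snippet that "do the following" refers to was dropped when this card text was flattened. A minimal sketch of what it would look like, assuming the details repository follows the leaderboard's `details_<org>__<model>` naming convention (the exact repo id is not spelled out in this record) and using the `harness_winogrande_5` config listed in the metadata above:

```python
# Minimal sketch, not the card's verbatim snippet.
# The repo id below is an assumption based on the leaderboard's
# details_<org>__<model> naming convention.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_Panchovix__airoboros-33b-gpt4-1.2-SuperHOT-8k",
    "harness_winogrande_5",  # any config name from the metadata above works
    split="train",           # "train" always points to the latest results
)
```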
[ "# Dataset Card for Evaluation run of Panchovix/airoboros-33b-gpt4-1.2-SuperHOT-8k", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Panchovix/airoboros-33b-gpt4-1.2-SuperHOT-8k on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split always points to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T16:57:27.002060 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Panchovix/airoboros-33b-gpt4-1.2-SuperHOT-8k", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Panchovix/airoboros-33b-gpt4-1.2-SuperHOT-8k on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split always points to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T16:57:27.002060 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 29, 31, 177, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Panchovix/airoboros-33b-gpt4-1.2-SuperHOT-8k## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Panchovix/airoboros-33b-gpt4-1.2-SuperHOT-8k on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split always points to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-17T16:57:27.002060 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
09b6669e64b2119b6eef54b541697bc47eb32cd4
# Dataset Card for Evaluation run of l3utterfly/open-llama-3b-v2-layla

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/l3utterfly/open-llama-3b-v2-layla
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [l3utterfly/open-llama-3b-v2-layla](https://huggingface.co/l3utterfly/open-llama-3b-v2-layla) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_l3utterfly__open-llama-3b-v2-layla",
    "harness_winogrande_5",
    split="train",
)
```

## Latest results

These are the [latest results from run 2023-09-17T08:49:03.131155](https://huggingface.co/datasets/open-llm-leaderboard/details_l3utterfly__open-llama-3b-v2-layla/blob/main/results_2023-09-17T08-49-03.131155.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.011954697986577181,
        "em_stderr": 0.0011130056898859086,
        "f1": 0.07875629194630916,
        "f1_stderr": 0.0018920865515620476,
        "acc": 0.3194349118852447,
        "acc_stderr": 0.008202509803690292
    },
    "harness|drop|3": {
        "em": 0.011954697986577181,
        "em_stderr": 0.0011130056898859086,
        "f1": 0.07875629194630916,
        "f1_stderr": 0.0018920865515620476
    },
    "harness|gsm8k|5": {
        "acc": 0.01061410159211524,
        "acc_stderr": 0.0028227133223877035
    },
    "harness|winogrande|5": {
        "acc": 0.6282557221783741,
        "acc_stderr": 0.013582306284992879
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
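As the card notes, every eval also exposes a "latest" split next to the timestamped ones. A minimal sketch of reading a single task's most recent details this way, using the `harness_gsm8k_5` config declared in this record's metadata:

```python
# Minimal sketch: fetch the most recent per-example details for one eval
# by selecting the "latest" split instead of a timestamped split.
from datasets import load_dataset

gsm8k_details = load_dataset(
    "open-llm-leaderboard/details_l3utterfly__open-llama-3b-v2-layla",
    "harness_gsm8k_5",
    split="latest",
)
print(len(gsm8k_details))  # number of evaluated examples in the latest run
```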
open-llm-leaderboard/details_l3utterfly__open-llama-3b-v2-layla
[ "region:us" ]
2023-08-18T17:47:31+00:00
{"pretty_name": "Evaluation run of l3utterfly/open-llama-3b-v2-layla", "dataset_summary": "Dataset automatically created during the evaluation run of model [l3utterfly/open-llama-3b-v2-layla](https://huggingface.co/l3utterfly/open-llama-3b-v2-layla) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split always points to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_l3utterfly__open-llama-3b-v2-layla\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-17T08:49:03.131155](https://huggingface.co/datasets/open-llm-leaderboard/details_l3utterfly__open-llama-3b-v2-layla/blob/main/results_2023-09-17T08-49-03.131155.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n    \"all\": {\n        \"em\": 0.011954697986577181,\n        \"em_stderr\": 0.0011130056898859086,\n        \"f1\": 0.07875629194630916,\n        \"f1_stderr\": 0.0018920865515620476,\n        \"acc\": 0.3194349118852447,\n        \"acc_stderr\": 0.008202509803690292\n    },\n    \"harness|drop|3\": {\n        \"em\": 0.011954697986577181,\n        \"em_stderr\": 0.0011130056898859086,\n        \"f1\": 0.07875629194630916,\n        \"f1_stderr\": 0.0018920865515620476\n    },\n    \"harness|gsm8k|5\": {\n        \"acc\": 0.01061410159211524,\n        \"acc_stderr\": 0.0028227133223877035\n    },\n    \"harness|winogrande|5\": {\n        \"acc\": 0.6282557221783741,\n        \"acc_stderr\": 0.013582306284992879\n    }\n}\n```", "repo_url": "https://huggingface.co/l3utterfly/open-llama-3b-v2-layla", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|arc:challenge|25_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_17T08_49_03.131155", "path": ["**/details_harness|drop|3_2023-09-17T08-49-03.131155.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-17T08-49-03.131155.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_17T08_49_03.131155", "path": ["**/details_harness|gsm8k|5_2023-09-17T08-49-03.131155.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-17T08-49-03.131155.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hellaswag|10_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T14:37:31.844402.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T14:37:31.844402.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T14:37:31.844402.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T14:37:31.844402.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T14:37:31.844402.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T14:37:31.844402.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_17T08_49_03.131155", "path": ["**/details_harness|winogrande|5_2023-09-17T08-49-03.131155.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-17T08-49-03.131155.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T14_37_31.844402", "path": ["results_2023-08-18T14:37:31.844402.parquet"]}, {"split": "2023_09_17T08_49_03.131155", "path": ["results_2023-09-17T08-49-03.131155.parquet"]}, {"split": "latest", "path": ["results_2023-09-17T08-49-03.131155.parquet"]}]}]}
2023-09-17T07:49:15+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of l3utterfly/open-llama-3b-v2-layla ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model l3utterfly/open-llama-3b-v2-layla on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-17T08:49:03.131155 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each of them in the results and in the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
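The flattened card above references a loading snippet ("To load the details from a run, you can for instance do the following:") that was stripped during flattening. A minimal sketch follows; the repository id is an assumption inferred from the card title, while the config name `harness_winogrande_5` and the `latest` split come from the YAML metadata listed above.

```python
from datasets import load_dataset

# NOTE: the repository id below is an assumption inferred from the card title;
# the config name "harness_winogrande_5" and the "latest" split are defined in
# the YAML metadata of this record.
data = load_dataset(
    "open-llm-leaderboard/details_l3utterfly__open-llama-3b-v2-layla",
    "harness_winogrande_5",
    split="latest",  # "latest" always points to the most recent run
)
```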
[ "# Dataset Card for Evaluation run of l3utterfly/open-llama-3b-v2-layla", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model l3utterfly/open-llama-3b-v2-layla on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T08:49:03.131155(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of l3utterfly/open-llama-3b-v2-layla", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model l3utterfly/open-llama-3b-v2-layla on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T08:49:03.131155(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 26, 31, 174, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of l3utterfly/open-llama-3b-v2-layla## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model l3utterfly/open-llama-3b-v2-layla on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-17T08:49:03.131155(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
dca4b0cf1fbe4da7d2f2e1009c06c23fd144d6d6
# Dataset Card for Evaluation run of psmathur/model_101 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/psmathur/model_101 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [psmathur/model_101](https://huggingface.co/psmathur/model_101) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_psmathur__model_101_public", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-11-08T17:02:48.057771](https://huggingface.co/datasets/open-llm-leaderboard/details_psmathur__model_101_public/blob/main/results_2023-11-08T17-02-48.057771.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each of them in the results and in the "latest" split for each eval): ```python { "all": { "em": 0.42701342281879195, "em_stderr": 0.0050656203622111255, "f1": 0.5510203439597356, "f1_stderr": 0.004541060645215944, "acc": 0.6344516695758009, "acc_stderr": 0.012237970912161255 }, "harness|drop|3": { "em": 0.42701342281879195, "em_stderr": 0.0050656203622111255, "f1": 0.5510203439597356, "f1_stderr": 0.004541060645215944 }, "harness|gsm8k|5": { "acc": 0.44806671721000757, "acc_stderr": 0.013697992668274525 }, "harness|winogrande|5": { "acc": 0.8208366219415943, "acc_stderr": 0.010777949156047986 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_psmathur__model_101
[ "region:us" ]
2023-08-18T17:47:43+00:00
{"pretty_name": "Evaluation run of psmathur/model_101", "dataset_summary": "Dataset automatically created during the evaluation run of model [psmathur/model_101](https://huggingface.co/psmathur/model_101) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_psmathur__model_101_public\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-11-08T17:02:48.057771](https://huggingface.co/datasets/open-llm-leaderboard/details_psmathur__model_101_public/blob/main/results_2023-11-08T17-02-48.057771.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.42701342281879195,\n \"em_stderr\": 0.0050656203622111255,\n \"f1\": 0.5510203439597356,\n \"f1_stderr\": 0.004541060645215944,\n \"acc\": 0.6344516695758009,\n \"acc_stderr\": 0.012237970912161255\n },\n \"harness|drop|3\": {\n \"em\": 0.42701342281879195,\n \"em_stderr\": 0.0050656203622111255,\n \"f1\": 0.5510203439597356,\n \"f1_stderr\": 0.004541060645215944\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.44806671721000757,\n \"acc_stderr\": 0.013697992668274525\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8208366219415943,\n \"acc_stderr\": 0.010777949156047986\n }\n}\n```", "repo_url": "https://huggingface.co/psmathur/model_101", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_drop_3", "data_files": [{"split": "2023_11_08T17_02_48.057771", "path": ["**/details_harness|drop|3_2023-11-08T17-02-48.057771.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-11-08T17-02-48.057771.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_11_08T17_02_48.057771", "path": ["**/details_harness|gsm8k|5_2023-11-08T17-02-48.057771.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-11-08T17-02-48.057771.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_11_08T17_02_48.057771", "path": ["**/details_harness|winogrande|5_2023-11-08T17-02-48.057771.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-11-08T17-02-48.057771.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_11_08T17_02_48.057771", "path": ["results_2023-11-08T17-02-48.057771.parquet"]}, {"split": "latest", "path": ["results_2023-11-08T17-02-48.057771.parquet"]}]}]}
2023-12-01T14:51:33+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of psmathur/model_101 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model psmathur/model_101 on the Open LLM Leaderboard. The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-11-08T17:02:48.057771 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each of them in the results and in the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
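The loading snippet was stripped from this flattened copy; the full card above shows `load_dataset("open-llm-leaderboard/details_psmathur__model_101_public", "harness_winogrande_5", split="train")`. As a complementary sketch, the aggregated "results" configuration (and its "latest" split, both declared in this record's YAML metadata) can be loaded the same way:

```python
from datasets import load_dataset

# The "results" config and its "latest" split are declared in this repo's
# YAML metadata; "latest" always points to the most recent evaluation run.
results = load_dataset(
    "open-llm-leaderboard/details_psmathur__model_101_public",
    "results",
    split="latest",
)
print(results[0])  # aggregated metrics for the latest run
```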
[ "# Dataset Card for Evaluation run of psmathur/model_101", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model psmathur/model_101 on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-11-08T17:02:48.057771(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of psmathur/model_101", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model psmathur/model_101 on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-11-08T17:02:48.057771(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 16, 31, 165, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of psmathur/model_101## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model psmathur/model_101 on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-11-08T17:02:48.057771(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
c9695acc35534800e71877f760e080aa31a6ad44
# Dataset of lyrica_prismriver/リリカ・プリズムリバー/리리카프리즘리버 (Touhou) This is the dataset of lyrica_prismriver/リリカ・プリズムリバー/리리카프리즘리버 (Touhou), containing 343 images and their tags. The core tags of this character are `short_hair, hat, brown_hair, brown_eyes`, which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)). ## List of Packages | Name | Images | Size | Download | Type | Description | |:-----------------|---------:|:-----------|:--------------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------| | raw | 343 | 216.41 MiB | [Download](https://huggingface.co/datasets/CyberHarem/lyrica_prismriver_touhou/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). | | 800 | 343 | 173.10 MiB | [Download](https://huggingface.co/datasets/CyberHarem/lyrica_prismriver_touhou/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. | | stage3-p480-800 | 539 | 289.56 MiB | [Download](https://huggingface.co/datasets/CyberHarem/lyrica_prismriver_touhou/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | | 1200 | 343 | 209.97 MiB | [Download](https://huggingface.co/datasets/CyberHarem/lyrica_prismriver_touhou/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. | | stage3-p480-1200 | 539 | 338.58 MiB | [Download](https://huggingface.co/datasets/CyberHarem/lyrica_prismriver_touhou/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | ### Load Raw Dataset with Waifuc We provide the raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need it, just run the following code: ```python import os import zipfile from huggingface_hub import hf_hub_download from waifuc.source import LocalSource # download raw archive file zip_file = hf_hub_download( repo_id='CyberHarem/lyrica_prismriver_touhou', repo_type='dataset', filename='dataset-raw.zip', ) # extract files to your directory dataset_dir = 'dataset_dir' os.makedirs(dataset_dir, exist_ok=True) with zipfile.ZipFile(zip_file, 'r') as zf: zf.extractall(dataset_dir) # load the dataset with waifuc source = LocalSource(dataset_dir) for item in source: print(item.image, item.meta['filename'], item.meta['tags']) ``` ## List of Clusters List of tag clustering results; some outfits may be mined here. 
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 16 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, keyboard_(instrument), red_headwear, red_skirt, red_vest, solo, white_shirt, bangs, frilled_hat, looking_at_viewer, smile, star_(symbol), blush, buttons, hair_between_eyes, open_mouth, juliet_sleeves, eighth_note, playing_instrument | | 1 | 7 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, full_body, keyboard_(instrument), long_sleeves, red_headwear, red_vest, shoes, solo, white_shirt, bangs, brown_footwear, playing_instrument, white_background, white_socks, looking_at_viewer, open_mouth, red_skirt, star_hat_ornament, simple_background, standing, :d, blush, frilled_hat | | 2 | 5 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, keyboard_(instrument), solo, one_eye_closed, smile | | 3 | 17 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, smile, solo, keyboard_(instrument), open_mouth, star_(symbol), skirt | | 4 | 26 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | keyboard_(instrument), sisters, 2girls, smile, open_mouth, blonde_hair | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | keyboard_(instrument) | red_headwear | red_skirt | red_vest | solo | white_shirt | bangs | frilled_hat | looking_at_viewer | smile | star_(symbol) | blush | buttons | hair_between_eyes | open_mouth | juliet_sleeves | eighth_note | playing_instrument | full_body | long_sleeves | shoes | brown_footwear | white_background | white_socks | star_hat_ornament | simple_background | standing | :d | one_eye_closed | skirt | sisters | 2girls | blonde_hair | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:------------------------|:---------------|:------------|:-----------|:-------|:--------------|:--------|:--------------|:--------------------|:--------|:----------------|:--------|:----------|:--------------------|:-------------|:-----------------|:--------------|:---------------------|:------------|:---------------|:--------|:-----------------|:-------------------|:--------------|:--------------------|:--------------------|:-----------|:-----|:-----------------|:--------|:----------|:---------|:--------------| | 0 | 16 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | 
![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | 1 | 7 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | X | X | X | X | X | X | X | | | X | | | X | | | X | X | X | X | X | X | X | X | X | X | X | | | | | | | 2 | 5 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | X | | | | X | | | | | X | | | | | | | | | | | | | | | | | | | X | | | | | | 3 | 17 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | X | | | | X | | | | | X | X | | | | X | | | | | | | | | | | | | | | X | | | | | 4 | 26 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | | X | | | | | | | | | X | | | | | X | | | | | | | | | | | | | | | | X | X | X |
CyberHarem/lyrica_prismriver_touhou
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-18T17:47:49+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-15T07:49:02+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of lyrica\_prismriver/リリカ・プリズムリバー/리리카프리즘리버 (Touhou) =========================================================== This is the dataset of lyrica\_prismriver/リリカ・プリズムリバー/리리카프리즘리버 (Touhou), containing 343 images and their tags. The core tags of this character are 'short\_hair, hat, brown\_hair, brown\_eyes', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by DeepGHS Team (huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide the raw dataset (including tagged images) for waifuc loading. If you need it, just run the following code List of Clusters ---------------- List of tag clustering results; some outfits may be mined here. ### Raw Text Version ### Table Version
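The waifuc snippet referenced above ("just run the following code") was stripped from this flattened copy. Below is a condensed sketch of the full snippet shown earlier in this record, with no new assumptions beyond that code.

```python
import os
import zipfile

from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource

# Download the raw archive and extract it to a local directory.
zip_file = hf_hub_download(
    repo_id='CyberHarem/lyrica_prismriver_touhou',
    repo_type='dataset',
    filename='dataset-raw.zip',
)
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)

# Iterate over the extracted items with waifuc.
for item in LocalSource(dataset_dir):
    print(item.meta['filename'], item.meta['tags'])
```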
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ 44, 61, 5, 4 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.### Raw Text Version### Table Version" ]
525287860a7028d652e8667591bf420ce3d15667
# Dataset Card for Evaluation run of psmathur/orca_mini_v3_70b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/psmathur/orca_mini_v3_70b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [psmathur/orca_mini_v3_70b](https://huggingface.co/psmathur/orca_mini_v3_70b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_psmathur__orca_mini_v3_70b_public", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-11-09T13:40:37.998536](https://huggingface.co/datasets/open-llm-leaderboard/details_psmathur__orca_mini_v3_70b_public/blob/main/results_2023-11-09T13-40-37.998536.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each of them in the results and in the "latest" split for each eval): ```python { "all": { "em": 0.31061241610738255, "em_stderr": 0.004738935370907925, "f1": 0.4017103607382563, "f1_stderr": 0.004555690324539627, "acc": 0.6178968305495601, "acc_stderr": 0.012083802131657148 }, "harness|drop|3": { "em": 0.31061241610738255, "em_stderr": 0.004738935370907925, "f1": 0.4017103607382563, "f1_stderr": 0.004555690324539627 }, "harness|gsm8k|5": { "acc": 0.4086429112964367, "acc_stderr": 0.01354063973334243 }, "harness|winogrande|5": { "acc": 0.8271507498026835, "acc_stderr": 0.010626964529971864 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_psmathur__orca_mini_v3_70b
[ "region:us" ]
2023-08-18T17:47:52+00:00
{"pretty_name": "Evaluation run of psmathur/orca_mini_v3_70b", "dataset_summary": "Dataset automatically created during the evaluation run of model [psmathur/orca_mini_v3_70b](https://huggingface.co/psmathur/orca_mini_v3_70b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_psmathur__orca_mini_v3_70b_public\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-11-09T13:40:37.998536](https://huggingface.co/datasets/open-llm-leaderboard/details_psmathur__orca_mini_v3_70b_public/blob/main/results_2023-11-09T13-40-37.998536.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.31061241610738255,\n \"em_stderr\": 0.004738935370907925,\n \"f1\": 0.4017103607382563,\n \"f1_stderr\": 0.004555690324539627,\n \"acc\": 0.6178968305495601,\n \"acc_stderr\": 0.012083802131657148\n },\n \"harness|drop|3\": {\n \"em\": 0.31061241610738255,\n \"em_stderr\": 0.004738935370907925,\n \"f1\": 0.4017103607382563,\n \"f1_stderr\": 0.004555690324539627\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.4086429112964367,\n \"acc_stderr\": 0.01354063973334243\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8271507498026835,\n \"acc_stderr\": 0.010626964529971864\n }\n}\n```", "repo_url": "https://huggingface.co/psmathur/orca_mini_v3_70b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_drop_3", "data_files": [{"split": "2023_11_09T13_40_37.998536", "path": ["**/details_harness|drop|3_2023-11-09T13-40-37.998536.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-11-09T13-40-37.998536.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_11_09T13_40_37.998536", "path": ["**/details_harness|gsm8k|5_2023-11-09T13-40-37.998536.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-11-09T13-40-37.998536.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_11_09T13_40_37.998536", "path": ["**/details_harness|winogrande|5_2023-11-09T13-40-37.998536.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-11-09T13-40-37.998536.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_11_09T13_40_37.998536", "path": ["results_2023-11-09T13-40-37.998536.parquet"]}, {"split": "latest", "path": ["results_2023-11-09T13-40-37.998536.parquet"]}]}]}
2023-12-01T14:56:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of psmathur/orca_mini_v3_70b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model psmathur/orca_mini_v3_70b on the Open LLM Leaderboard. The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-11-09T13:40:37.998536 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each of them in the results and in the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
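As with the other flattened cards, the loading snippet was stripped here; the full card above uses `harness_winogrande_5` with `split="train"`. Below is a sketch for another configuration, `harness_gsm8k_5`, which is declared in this record's YAML metadata:

```python
from datasets import load_dataset

# "harness_gsm8k_5" and its "latest" split are declared in this repo's
# YAML metadata; "latest" always points to the most recent evaluation run.
gsm8k_details = load_dataset(
    "open-llm-leaderboard/details_psmathur__orca_mini_v3_70b_public",
    "harness_gsm8k_5",
    split="latest",
)
```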
[ "# Dataset Card for Evaluation run of psmathur/orca_mini_v3_70b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model psmathur/orca_mini_v3_70b on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-11-09T13:40:37.998536(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of psmathur/orca_mini_v3_70b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model psmathur/orca_mini_v3_70b on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-11-09T13:40:37.998536(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 23, 31, 172, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of psmathur/orca_mini_v3_70b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model psmathur/orca_mini_v3_70b on the Open LLM Leaderboard.\n\nThe dataset is composed of 3 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-11-09T13:40:37.998536(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
e40bc1815d971706ebce4a67e169144b115a7977
# Dataset Card for Evaluation run of TFLai/gpt-neo-1.3B-4bit-alpaca ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/TFLai/gpt-neo-1.3B-4bit-alpaca - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [TFLai/gpt-neo-1.3B-4bit-alpaca](https://huggingface.co/TFLai/gpt-neo-1.3B-4bit-alpaca) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TFLai__gpt-neo-1.3B-4bit-alpaca", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-23T17:57:15.784929](https://huggingface.co/datasets/open-llm-leaderboard/details_TFLai__gpt-neo-1.3B-4bit-alpaca/blob/main/results_2023-09-23T17-57-15.784929.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each of them in the results and in the "latest" split for each eval): ```python { "all": { "em": 0.0009437919463087249, "em_stderr": 0.00031446531194133983, "f1": 0.05118708053691287, "f1_stderr": 0.001257884278930374, "acc": 0.2821159149890526, "acc_stderr": 0.007628169555669113 }, "harness|drop|3": { "em": 0.0009437919463087249, "em_stderr": 0.00031446531194133983, "f1": 0.05118708053691287, "f1_stderr": 0.001257884278930374 }, "harness|gsm8k|5": { "acc": 0.002274450341167551, "acc_stderr": 0.0013121578148674233 }, "harness|winogrande|5": { "acc": 0.5619573796369376, "acc_stderr": 0.013944181296470803 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_TFLai__gpt-neo-1.3B-4bit-alpaca
[ "region:us" ]
2023-08-18T17:48:01+00:00
{"pretty_name": "Evaluation run of TFLai/gpt-neo-1.3B-4bit-alpaca", "dataset_summary": "Dataset automatically created during the evaluation run of model [TFLai/gpt-neo-1.3B-4bit-alpaca](https://huggingface.co/TFLai/gpt-neo-1.3B-4bit-alpaca) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TFLai__gpt-neo-1.3B-4bit-alpaca\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-23T17:57:15.784929](https://huggingface.co/datasets/open-llm-leaderboard/details_TFLai__gpt-neo-1.3B-4bit-alpaca/blob/main/results_2023-09-23T17-57-15.784929.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0009437919463087249,\n \"em_stderr\": 0.00031446531194133983,\n \"f1\": 0.05118708053691287,\n \"f1_stderr\": 0.001257884278930374,\n \"acc\": 0.2821159149890526,\n \"acc_stderr\": 0.007628169555669113\n },\n \"harness|drop|3\": {\n \"em\": 0.0009437919463087249,\n \"em_stderr\": 0.00031446531194133983,\n \"f1\": 0.05118708053691287,\n \"f1_stderr\": 0.001257884278930374\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.002274450341167551,\n \"acc_stderr\": 0.0013121578148674233\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5619573796369376,\n \"acc_stderr\": 0.013944181296470803\n }\n}\n```", "repo_url": "https://huggingface.co/TFLai/gpt-neo-1.3B-4bit-alpaca", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|arc:challenge|25_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_23T17_57_15.784929", "path": ["**/details_harness|drop|3_2023-09-23T17-57-15.784929.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-23T17-57-15.784929.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_23T17_57_15.784929", "path": ["**/details_harness|gsm8k|5_2023-09-23T17-57-15.784929.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-23T17-57-15.784929.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hellaswag|10_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T13:07:16.687815.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T13:07:16.687815.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T13:07:16.687815.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T13:07:16.687815.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T13:07:16.687815.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_23T17_57_15.784929", "path": ["**/details_harness|winogrande|5_2023-09-23T17-57-15.784929.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-23T17-57-15.784929.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T13_07_16.687815", "path": ["results_2023-08-18T13:07:16.687815.parquet"]}, {"split": "2023_09_23T17_57_15.784929", "path": ["results_2023-09-23T17-57-15.784929.parquet"]}, {"split": "latest", "path": ["results_2023-09-23T17-57-15.784929.parquet"]}]}]}
2023-09-23T16:57:27+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of TFLai/gpt-neo-1.3B-4bit-alpaca ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model TFLai/gpt-neo-1.3B-4bit-alpaca on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-23T17:57:15.784929 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
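The load example the summary above refers to, as preserved in this record's metadata, selects one task configuration (here `harness_winogrande_5`) and the `train` split, which always points to the latest run:

```python
from datasets import load_dataset

# Per-sample details for the 5-shot Winogrande eval;
# the "train" split tracks the latest results.
data = load_dataset(
    "open-llm-leaderboard/details_TFLai__gpt-neo-1.3B-4bit-alpaca",
    "harness_winogrande_5",
    split="train",
)
```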
[ "# Dataset Card for Evaluation run of TFLai/gpt-neo-1.3B-4bit-alpaca", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model TFLai/gpt-neo-1.3B-4bit-alpaca on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split always points to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-23T17:57:15.784929 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of TFLai/gpt-neo-1.3B-4bit-alpaca", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model TFLai/gpt-neo-1.3B-4bit-alpaca on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split always points to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-23T17:57:15.784929 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 25, 31, 173, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of TFLai/gpt-neo-1.3B-4bit-alpaca## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model TFLai/gpt-neo-1.3B-4bit-alpaca on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split always points to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-23T17:57:15.784929 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
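Each run listed in this record's metadata is also addressable as its own timestamped split. A minimal sketch of loading one specific run, using the `harness_gsm8k_5` configuration and the `2023_09_23T17_57_15.784929` split name taken from the metadata above (names copied from the record, not independently verified):

```python
from datasets import load_dataset

# Load the details of one specific run by its timestamped split name,
# rather than "latest" (which tracks the most recent run).
run = load_dataset(
    "open-llm-leaderboard/details_TFLai__gpt-neo-1.3B-4bit-alpaca",
    "harness_gsm8k_5",
    split="2023_09_23T17_57_15.784929",
)
```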
6213d18809dce617089cf794fb5ba0933197c053
# Dataset Card for Evaluation run of YeungNLP/firefly-bloom-7b1 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/YeungNLP/firefly-bloom-7b1 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [YeungNLP/firefly-bloom-7b1](https://huggingface.co/YeungNLP/firefly-bloom-7b1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_YeungNLP__firefly-bloom-7b1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-15T03:08:36.849842](https://huggingface.co/datasets/open-llm-leaderboard/details_YeungNLP__firefly-bloom-7b1/blob/main/results_2023-10-15T03-08-36.849842.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.03208892617449664, "em_stderr": 0.0018048244787816678, "f1": 0.1036986157718121, "f1_stderr": 0.0023306866623647965, "acc": 0.326221462409936, "acc_stderr": 0.007855425735305286 }, "harness|drop|3": { "em": 0.03208892617449664, "em_stderr": 0.0018048244787816678, "f1": 0.1036986157718121, "f1_stderr": 0.0023306866623647965 }, "harness|gsm8k|5": { "acc": 0.006823351023502654, "acc_stderr": 0.0022675371022544836 }, "harness|winogrande|5": { "acc": 0.6456195737963694, "acc_stderr": 0.013443314368356088 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
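The aggregated scores live in the "results" configuration mentioned in the card. A minimal sketch, assuming it follows the same split naming as the per-task configurations (timestamped splits plus a "latest" alias):

```python
from datasets import load_dataset

# Aggregated metrics across all runs; "latest" is assumed to alias the
# most recent results file, as in the per-task configurations.
results = load_dataset(
    "open-llm-leaderboard/details_YeungNLP__firefly-bloom-7b1",
    "results",
    split="latest",
)
```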
open-llm-leaderboard/details_YeungNLP__firefly-bloom-7b1
[ "region:us" ]
2023-08-18T17:48:12+00:00
{"pretty_name": "Evaluation run of YeungNLP/firefly-bloom-7b1", "dataset_summary": "Dataset automatically created during the evaluation run of model [YeungNLP/firefly-bloom-7b1](https://huggingface.co/YeungNLP/firefly-bloom-7b1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The \"train\" split always points to the latest results.\n\nAn additional configuration \"results\" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_YeungNLP__firefly-bloom-7b1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-15T03:08:36.849842](https://huggingface.co/datasets/open-llm-leaderboard/details_YeungNLP__firefly-bloom-7b1/blob/main/results_2023-10-15T03-08-36.849842.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.03208892617449664,\n \"em_stderr\": 0.0018048244787816678,\n \"f1\": 0.1036986157718121,\n \"f1_stderr\": 0.0023306866623647965,\n \"acc\": 0.326221462409936,\n \"acc_stderr\": 0.007855425735305286\n },\n \"harness|drop|3\": {\n \"em\": 0.03208892617449664,\n \"em_stderr\": 0.0018048244787816678,\n \"f1\": 0.1036986157718121,\n \"f1_stderr\": 0.0023306866623647965\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.006823351023502654,\n \"acc_stderr\": 0.0022675371022544836\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6456195737963694,\n \"acc_stderr\": 0.013443314368356088\n }\n}\n```", "repo_url": "https://huggingface.co/YeungNLP/firefly-bloom-7b1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|arc:challenge|25_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_15T03_08_36.849842", "path": ["**/details_harness|drop|3_2023-10-15T03-08-36.849842.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-15T03-08-36.849842.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_15T03_08_36.849842", "path": ["**/details_harness|gsm8k|5_2023-10-15T03-08-36.849842.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-15T03-08-36.849842.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hellaswag|10_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": 
[{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:41:37.942439.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:41:37.942439.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T18:41:37.942439.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T18:41:37.942439.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T18:41:37.942439.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_15T03_08_36.849842", "path": ["**/details_harness|winogrande|5_2023-10-15T03-08-36.849842.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-15T03-08-36.849842.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T18_41_37.942439", "path": ["results_2023-08-17T18:41:37.942439.parquet"]}, {"split": "2023_10_15T03_08_36.849842", "path": ["results_2023-10-15T03-08-36.849842.parquet"]}, {"split": "latest", "path": ["results_2023-10-15T03-08-36.849842.parquet"]}]}]}
2023-10-15T02:08:48+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of YeungNLP/firefly-bloom-7b1 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model YeungNLP/firefly-bloom-7b1 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the sketch after this card): ## Latest results These are the latest results from run 2023-10-15T03:08:36.849842 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one under the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
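The flattened card above elides the load snippet it refers to. A minimal sketch of that call, assuming the leaderboard's usual `details_<org>__<model>` repo naming (so `open-llm-leaderboard/details_YeungNLP__firefly-bloom-7b1` is inferred, not confirmed by this record) and the `harness_winogrande_5` configuration listed in the metadata above:

```python
from datasets import load_dataset

# Repo id follows the details_<org>__<model> pattern used elsewhere in this dump;
# treat it as an assumption and adjust if the actual repo differs.
data = load_dataset(
    "open-llm-leaderboard/details_YeungNLP__firefly-bloom-7b1",
    "harness_winogrande_5",   # one of the 64 per-task configurations
    split="latest",           # or a timestamped split such as "2023_10_15T03_08_36.849842"
)
print(data)
```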
[ "# Dataset Card for Evaluation run of YeungNLP/firefly-bloom-7b1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-bloom-7b1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T03:08:36.849842(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of YeungNLP/firefly-bloom-7b1", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-bloom-7b1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-15T03:08:36.849842(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 170, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of YeungNLP/firefly-bloom-7b1## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model YeungNLP/firefly-bloom-7b1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-15T03:08:36.849842(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
a1b28e0670bfcca5cce9ec5a07b87701f543a9ed
# Dataset Card for Evaluation run of ehartford/samantha-1.1-llama-33b

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/ehartford/samantha-1.1-llama-33b
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [ehartford/samantha-1.1-llama-33b](https://huggingface.co/ehartford/samantha-1.1-llama-33b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_ehartford__samantha-1.1-llama-33b",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-09-17T11:42:44.859774](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__samantha-1.1-llama-33b/blob/main/results_2023-09-17T11-42-44.859774.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one under the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.20994127516778524,
        "em_stderr": 0.004170789326061049,
        "f1": 0.2829341442953027,
        "f1_stderr": 0.004181823285876536,
        "acc": 0.4024903466008606,
        "acc_stderr": 0.008664723950310687
    },
    "harness|drop|3": {
        "em": 0.20994127516778524,
        "em_stderr": 0.004170789326061049,
        "f1": 0.2829341442953027,
        "f1_stderr": 0.004181823285876536
    },
    "harness|gsm8k|5": {
        "acc": 0.0401819560272934,
        "acc_stderr": 0.00540943973697051
    },
    "harness|winogrande|5": {
        "acc": 0.7647987371744278,
        "acc_stderr": 0.011920008163650865
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
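The snippet in the card loads the "train" alias. Because every configuration also exposes timestamped splits alongside a "latest" alias (see the configs in the metadata below), here is a short sketch of pinning an exact run instead, which keeps analyses reproducible when later re-evaluations move "latest"; the split names are taken from this card's metadata:

```python
from datasets import load_dataset

repo = "open-llm-leaderboard/details_ehartford__samantha-1.1-llama-33b"

# "latest" resolves to the newest evaluation run for this task...
latest = load_dataset(repo, "harness_winogrande_5", split="latest")

# ...while a timestamped split pins one specific run for reproducibility.
pinned = load_dataset(repo, "harness_winogrande_5", split="2023_09_17T11_42_44.859774")

print(len(latest), len(pinned))
```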
open-llm-leaderboard/details_ehartford__samantha-1.1-llama-33b
[ "region:us" ]
2023-08-18T17:48:22+00:00
{"pretty_name": "Evaluation run of ehartford/samantha-1.1-llama-33b", "dataset_summary": "Dataset automatically created during the evaluation run of model [ehartford/samantha-1.1-llama-33b](https://huggingface.co/ehartford/samantha-1.1-llama-33b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ehartford__samantha-1.1-llama-33b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-17T11:42:44.859774](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__samantha-1.1-llama-33b/blob/main/results_2023-09-17T11-42-44.859774.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.20994127516778524,\n \"em_stderr\": 0.004170789326061049,\n \"f1\": 0.2829341442953027,\n \"f1_stderr\": 0.004181823285876536,\n \"acc\": 0.4024903466008606,\n \"acc_stderr\": 0.008664723950310687\n },\n \"harness|drop|3\": {\n \"em\": 0.20994127516778524,\n \"em_stderr\": 0.004170789326061049,\n \"f1\": 0.2829341442953027,\n \"f1_stderr\": 0.004181823285876536\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0401819560272934,\n \"acc_stderr\": 0.00540943973697051\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7647987371744278,\n \"acc_stderr\": 0.011920008163650865\n }\n}\n```", "repo_url": "https://huggingface.co/ehartford/samantha-1.1-llama-33b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|arc:challenge|25_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_17T11_42_44.859774", "path": ["**/details_harness|drop|3_2023-09-17T11-42-44.859774.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-17T11-42-44.859774.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_17T11_42_44.859774", "path": ["**/details_harness|gsm8k|5_2023-09-17T11-42-44.859774.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-17T11-42-44.859774.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hellaswag|10_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T14:31:51.159426.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T14:31:51.159426.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T14:31:51.159426.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T14:31:51.159426.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T14:31:51.159426.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_17T11_42_44.859774", "path": ["**/details_harness|winogrande|5_2023-09-17T11-42-44.859774.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-17T11-42-44.859774.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T14_31_51.159426", "path": ["results_2023-08-18T14:31:51.159426.parquet"]}, {"split": "2023_09_17T11_42_44.859774", "path": ["results_2023-09-17T11-42-44.859774.parquet"]}, {"split": "latest", "path": ["results_2023-09-17T11-42-44.859774.parquet"]}]}]}
2023-09-17T10:42:56+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ehartford/samantha-1.1-llama-33b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model ehartford/samantha-1.1-llama-33b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-17T11:42:44.859774 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
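The load snippet was stripped from the flattened text above ("you can for instance do the following:" is left dangling); as a hedged sketch, the call it refers to presumably mirrors the other cards in this dump. The repo id `open-llm-leaderboard/details_ehartford__samantha-1.1-llama-33b` is inferred from the card title (an assumption, not quoted from the original), and `harness_winogrande_5` is one of the configs listed in this record's metadata:

```python
# Minimal sketch, assuming the `datasets` library is installed, network access,
# and that the repo id inferred from the card title is correct.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_ehartford__samantha-1.1-llama-33b",
    "harness_winogrande_5",  # one config per evaluated task, per the summary
    split="latest",          # the "latest" split points at the newest run
)
print(data)
```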
[ "# Dataset Card for Evaluation run of ehartford/samantha-1.1-llama-33b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/samantha-1.1-llama-33b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T11:42:44.859774(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ehartford/samantha-1.1-llama-33b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/samantha-1.1-llama-33b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T11:42:44.859774(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 170, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of ehartford/samantha-1.1-llama-33b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/samantha-1.1-llama-33b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-17T11:42:44.859774(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
9cbe4a33756d479b83487d9a0b6e36f8569c44a3
# Dataset Card for Evaluation run of ehartford/minotaur-llama2-13b-qlora ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/ehartford/minotaur-llama2-13b-qlora - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [ehartford/minotaur-llama2-13b-qlora](https://huggingface.co/ehartford/minotaur-llama2-13b-qlora) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ehartford__minotaur-llama2-13b-qlora", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-18T15:04:43.110639](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__minotaur-llama2-13b-qlora/blob/main/results_2023-10-18T15-04-43.110639.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.08305369127516779, "em_stderr": 0.0028261230954209926, "f1": 0.14533661912751625, "f1_stderr": 0.003000368188887415, "acc": 0.4414884036541998, "acc_stderr": 0.010464953595556116 }, "harness|drop|3": { "em": 0.08305369127516779, "em_stderr": 0.0028261230954209926, "f1": 0.14533661912751625, "f1_stderr": 0.003000368188887415 }, "harness|gsm8k|5": { "acc": 0.12054586808188021, "acc_stderr": 0.008968608285309073 }, "harness|winogrande|5": { "acc": 0.7624309392265194, "acc_stderr": 0.011961298905803159 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
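As a small follow-up sketch (not part of the original card), the aggregated metrics shown under "Latest results" can presumably be reproduced by loading the "results" configuration, whose "latest" split is listed in this record's metadata:

```python
# Hedged sketch: assumes the `datasets` library and network access; the
# "results" config and "latest" split names come from this record's metadata.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_ehartford__minotaur-llama2-13b-qlora",
    "results",
    split="latest",  # resolves to results_2023-10-18T15-04-43.110639.parquet
)
print(results[0])  # one row of aggregated metrics (em, f1, acc, ...)
```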
open-llm-leaderboard/details_ehartford__minotaur-llama2-13b-qlora
[ "region:us" ]
2023-08-18T17:48:31+00:00
{"pretty_name": "Evaluation run of ehartford/minotaur-llama2-13b-qlora", "dataset_summary": "Dataset automatically created during the evaluation run of model [ehartford/minotaur-llama2-13b-qlora](https://huggingface.co/ehartford/minotaur-llama2-13b-qlora) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ehartford__minotaur-llama2-13b-qlora\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-18T15:04:43.110639](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__minotaur-llama2-13b-qlora/blob/main/results_2023-10-18T15-04-43.110639.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.08305369127516779,\n \"em_stderr\": 0.0028261230954209926,\n \"f1\": 0.14533661912751625,\n \"f1_stderr\": 0.003000368188887415,\n \"acc\": 0.4414884036541998,\n \"acc_stderr\": 0.010464953595556116\n },\n \"harness|drop|3\": {\n \"em\": 0.08305369127516779,\n \"em_stderr\": 0.0028261230954209926,\n \"f1\": 0.14533661912751625,\n \"f1_stderr\": 0.003000368188887415\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.12054586808188021,\n \"acc_stderr\": 0.008968608285309073\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7624309392265194,\n \"acc_stderr\": 0.011961298905803159\n }\n}\n```", "repo_url": "https://huggingface.co/ehartford/minotaur-llama2-13b-qlora", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|arc:challenge|25_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_18T15_04_43.110639", "path": ["**/details_harness|drop|3_2023-10-18T15-04-43.110639.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-18T15-04-43.110639.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_18T15_04_43.110639", "path": ["**/details_harness|gsm8k|5_2023-10-18T15-04-43.110639.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-18T15-04-43.110639.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hellaswag|10_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T01:34:00.982275.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T01:34:00.982275.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T01:34:00.982275.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T01:34:00.982275.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T01:34:00.982275.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T01:34:00.982275.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_18T15_04_43.110639", "path": ["**/details_harness|winogrande|5_2023-10-18T15-04-43.110639.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-18T15-04-43.110639.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T01_34_00.982275", "path": ["results_2023-08-18T01:34:00.982275.parquet"]}, {"split": "2023_10_18T15_04_43.110639", "path": ["results_2023-10-18T15-04-43.110639.parquet"]}, {"split": "latest", "path": ["results_2023-10-18T15-04-43.110639.parquet"]}]}]}
2023-10-18T14:04:55+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of ehartford/minotaur-llama2-13b-qlora ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model ehartford/minotaur-llama2-13b-qlora on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-18T15:04:43.110639 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of ehartford/minotaur-llama2-13b-qlora", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/minotaur-llama2-13b-qlora on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T15:04:43.110639(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of ehartford/minotaur-llama2-13b-qlora", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/minotaur-llama2-13b-qlora on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T15:04:43.110639(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 25, 31, 173, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of ehartford/minotaur-llama2-13b-qlora## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model ehartford/minotaur-llama2-13b-qlora on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-18T15:04:43.110639(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
97e63407ea9f096d619b94d86010ecd1069b2c10
# Dataset Card for Evaluation run of v2ray/LLaMA-2-Wizard-70B-QLoRA ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/v2ray/LLaMA-2-Wizard-70B-QLoRA - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [v2ray/LLaMA-2-Wizard-70B-QLoRA](https://huggingface.co/v2ray/LLaMA-2-Wizard-70B-QLoRA) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_v2ray__LLaMA-2-Wizard-70B-QLoRA", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-13T16:29:43.203362](https://huggingface.co/datasets/open-llm-leaderboard/details_v2ray__LLaMA-2-Wizard-70B-QLoRA/blob/main/results_2023-10-13T16-29-43.203362.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.5358640939597316, "em_stderr": 0.005107278772685844, "f1": 0.5902537751677871, "f1_stderr": 0.004795935527255125, "acc": 0.5639903828029773, "acc_stderr": 0.011700610418717068 }, "harness|drop|3": { "em": 0.5358640939597316, "em_stderr": 0.005107278772685844, "f1": 0.5902537751677871, "f1_stderr": 0.004795935527255125 }, "harness|gsm8k|5": { "acc": 0.30477634571645185, "acc_stderr": 0.012679297549515413 }, "harness|winogrande|5": { "acc": 0.8232044198895028, "acc_stderr": 0.010721923287918725 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
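To pin an analysis to one specific run rather than "latest", the timestamped split names from the record's metadata can presumably be passed directly. A hedged sketch, using the `harness_gsm8k_5` config and the 2023-10-13 run timestamp recorded for this dataset (both taken from this record's metadata, not from the original card):

```python
# Sketch only: assumes the `datasets` library is installed and network access;
# the config name and timestamped split are copied from the record's metadata.
from datasets import load_dataset

gsm8k_run = load_dataset(
    "open-llm-leaderboard/details_v2ray__LLaMA-2-Wizard-70B-QLoRA",
    "harness_gsm8k_5",
    split="2023_10_13T16_29_43.203362",  # one split per evaluation run
)
print(len(gsm8k_run))  # number of per-example detail rows for that run
```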
open-llm-leaderboard/details_v2ray__LLaMA-2-Wizard-70B-QLoRA
[ "region:us" ]
2023-08-18T17:48:40+00:00
{"pretty_name": "Evaluation run of v2ray/LLaMA-2-Wizard-70B-QLoRA", "dataset_summary": "Dataset automatically created during the evaluation run of model [v2ray/LLaMA-2-Wizard-70B-QLoRA](https://huggingface.co/v2ray/LLaMA-2-Wizard-70B-QLoRA) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_v2ray__LLaMA-2-Wizard-70B-QLoRA\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-13T16:29:43.203362](https://huggingface.co/datasets/open-llm-leaderboard/details_v2ray__LLaMA-2-Wizard-70B-QLoRA/blob/main/results_2023-10-13T16-29-43.203362.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.5358640939597316,\n \"em_stderr\": 0.005107278772685844,\n \"f1\": 0.5902537751677871,\n \"f1_stderr\": 0.004795935527255125,\n \"acc\": 0.5639903828029773,\n \"acc_stderr\": 0.011700610418717068\n },\n \"harness|drop|3\": {\n \"em\": 0.5358640939597316,\n \"em_stderr\": 0.005107278772685844,\n \"f1\": 0.5902537751677871,\n \"f1_stderr\": 0.004795935527255125\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.30477634571645185,\n \"acc_stderr\": 0.012679297549515413\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8232044198895028,\n \"acc_stderr\": 0.010721923287918725\n }\n}\n```", "repo_url": "https://huggingface.co/v2ray/LLaMA-2-Wizard-70B-QLoRA", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|arc:challenge|25_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_13T16_29_43.203362", "path": ["**/details_harness|drop|3_2023-10-13T16-29-43.203362.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-13T16-29-43.203362.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_13T16_29_43.203362", "path": ["**/details_harness|gsm8k|5_2023-10-13T16-29-43.203362.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-13T16-29-43.203362.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hellaswag|10_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", 
"data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T07:09:43.451689.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T07:09:43.451689.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T07:09:43.451689.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T07:09:43.451689.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T07:09:43.451689.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_13T16_29_43.203362", "path": ["**/details_harness|winogrande|5_2023-10-13T16-29-43.203362.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-13T16-29-43.203362.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T07_09_43.451689", "path": ["results_2023-08-18T07:09:43.451689.parquet"]}, {"split": "2023_10_13T16_29_43.203362", "path": ["results_2023-10-13T16-29-43.203362.parquet"]}, {"split": "latest", "path": ["results_2023-10-13T16-29-43.203362.parquet"]}]}]}
2023-10-13T15:29:55+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of v2ray/LLaMA-2-Wizard-70B-QLoRA ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model v2ray/LLaMA-2-Wizard-70B-QLoRA on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-13T16:29:43.203362 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of v2ray/LLaMA-2-Wizard-70B-QLoRA", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model v2ray/LLaMA-2-Wizard-70B-QLoRA on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-13T16:29:43.203362(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of v2ray/LLaMA-2-Wizard-70B-QLoRA", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model v2ray/LLaMA-2-Wizard-70B-QLoRA on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-13T16:29:43.203362(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 27, 31, 175, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of v2ray/LLaMA-2-Wizard-70B-QLoRA## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model v2ray/LLaMA-2-Wizard-70B-QLoRA on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-13T16:29:43.203362(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
63d3550ef03a6690176201be48044902734382fd
# Dataset Card for Evaluation run of digitous/13B-Chimera ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/digitous/13B-Chimera - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [digitous/13B-Chimera](https://huggingface.co/digitous/13B-Chimera) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_digitous__13B-Chimera", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-21T22:03:30.588181](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__13B-Chimera/blob/main/results_2023-10-21T22-03-30.588181.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.2860738255033557, "em_stderr": 0.004628128039725735, "f1": 0.35844274328859277, "f1_stderr": 0.004563129120809242, "acc": 0.4397952815178321, "acc_stderr": 0.010144797366305785 }, "harness|drop|3": { "em": 0.2860738255033557, "em_stderr": 0.004628128039725735, "f1": 0.35844274328859277, "f1_stderr": 0.004563129120809242 }, "harness|gsm8k|5": { "acc": 0.1068991660348749, "acc_stderr": 0.008510982565520481 }, "harness|winogrande|5": { "acc": 0.7726913970007893, "acc_stderr": 0.011778612167091088 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
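Here too the top-level "acc" is exactly the unweighted mean of the gsm8k and winogrande accuracies, (0.1068991660348749 + 0.7726913970007893) / 2 = 0.4397952815178321. A small sketch, again assuming only the `datasets` library, of pinning the per-sample DROP details to this specific run via its timestamped split (listed in the configs below) rather than the moving "latest"/"train" pointers:

```python
from datasets import load_dataset

# Per-sample DROP details from the 2023-10-21 run; split="latest" would
# silently track whichever evaluation run is newest.
drop_details = load_dataset(
    "open-llm-leaderboard/details_digitous__13B-Chimera",
    "harness_drop_3",
    split="2023_10_21T22_03_30.588181",
)
```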
open-llm-leaderboard/details_digitous__13B-Chimera
[ "region:us" ]
2023-08-18T17:48:50+00:00
{"pretty_name": "Evaluation run of digitous/13B-Chimera", "dataset_summary": "Dataset automatically created during the evaluation run of model [digitous/13B-Chimera](https://huggingface.co/digitous/13B-Chimera) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_digitous__13B-Chimera\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-21T22:03:30.588181](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__13B-Chimera/blob/main/results_2023-10-21T22-03-30.588181.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.2860738255033557,\n \"em_stderr\": 0.004628128039725735,\n \"f1\": 0.35844274328859277,\n \"f1_stderr\": 0.004563129120809242,\n \"acc\": 0.4397952815178321,\n \"acc_stderr\": 0.010144797366305785\n },\n \"harness|drop|3\": {\n \"em\": 0.2860738255033557,\n \"em_stderr\": 0.004628128039725735,\n \"f1\": 0.35844274328859277,\n \"f1_stderr\": 0.004563129120809242\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.1068991660348749,\n \"acc_stderr\": 0.008510982565520481\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7726913970007893,\n \"acc_stderr\": 0.011778612167091088\n }\n}\n```", "repo_url": "https://huggingface.co/digitous/13B-Chimera", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|arc:challenge|25_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_21T22_03_30.588181", "path": ["**/details_harness|drop|3_2023-10-21T22-03-30.588181.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-21T22-03-30.588181.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_21T22_03_30.588181", "path": ["**/details_harness|gsm8k|5_2023-10-21T22-03-30.588181.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-21T22-03-30.588181.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hellaswag|10_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", 
"path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T15:36:44.224352.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T15:36:44.224352.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T15:36:44.224352.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T15:36:44.224352.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T15:36:44.224352.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T15:36:44.224352.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_21T22_03_30.588181", "path": ["**/details_harness|winogrande|5_2023-10-21T22-03-30.588181.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-21T22-03-30.588181.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T15_36_44.224352", "path": ["results_2023-08-17T15:36:44.224352.parquet"]}, {"split": "2023_10_21T22_03_30.588181", "path": ["results_2023-10-21T22-03-30.588181.parquet"]}, {"split": "latest", "path": ["results_2023-10-21T22-03-30.588181.parquet"]}]}]}
2023-10-21T21:03:42+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of digitous/13B-Chimera

## Dataset Description

- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model digitous/13B-Chimera on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following (see the sketch after this card).

## Latest results

These are the latest results from run 2023-10-21T22:03:30.588181 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
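A minimal sketch of the loading snippet referenced above, assuming the repository follows the leaderboard's usual `open-llm-leaderboard/details_<org>__<model>` naming (the repo id is inferred, not stated in this card) and using the `harness_winogrande_5` config that is listed in the metadata:

```python
from datasets import load_dataset

# Repo id inferred from the leaderboard's naming convention -- verify it
# before relying on it; the config name comes from this card's metadata.
data = load_dataset(
    "open-llm-leaderboard/details_digitous__13B-Chimera",
    "harness_winogrande_5",
    split="train",
)
```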
[ "# Dataset Card for Evaluation run of digitous/13B-Chimera", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/13B-Chimera on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-21T22:03:30.588181(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of digitous/13B-Chimera", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/13B-Chimera on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-21T22:03:30.588181(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 16, 31, 164, 68, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of digitous/13B-Chimera## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model digitous/13B-Chimera on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-21T22:03:30.588181(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
a731e4c7c44a852889a14f33de0efb38f107370d
# Dataset Card for Evaluation run of lmsys/vicuna-7b-v1.5-16k

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/lmsys/vicuna-7b-v1.5-16k
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [lmsys/vicuna-7b-v1.5-16k](https://huggingface.co/lmsys/vicuna-7b-v1.5-16k) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_lmsys__vicuna-7b-v1.5-16k",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-21T18:19:50.042065](https://huggingface.co/datasets/open-llm-leaderboard/details_lmsys__vicuna-7b-v1.5-16k/blob/main/results_2023-10-21T18-19-50.042065.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.008913590604026845,
        "em_stderr": 0.0009625465757536675,
        "f1": 0.07239303691275177,
        "f1_stderr": 0.001693138759317829,
        "acc": 0.38701199696261246,
        "acc_stderr": 0.00973738194322199
    },
    "harness|drop|3": {
        "em": 0.008913590604026845,
        "em_stderr": 0.0009625465757536675,
        "f1": 0.07239303691275177,
        "f1_stderr": 0.001693138759317829
    },
    "harness|gsm8k|5": {
        "acc": 0.06368460955269144,
        "acc_stderr": 0.006726213078805721
    },
    "harness|winogrande|5": {
        "acc": 0.7103393843725335,
        "acc_stderr": 0.012748550807638257
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
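Beyond per-task details, the aggregated metrics shown under "Latest results" can also be loaded from the "results" config. A minimal sketch, assuming only the config and split names declared in this card's metadata (the exact column layout of the results parquet is not documented here):

```python
from datasets import load_dataset

# "results" is the aggregated-metrics config declared in the card metadata;
# the "latest" split always points at the most recent run's parquet file.
results = load_dataset(
    "open-llm-leaderboard/details_lmsys__vicuna-7b-v1.5-16k",
    "results",
    split="latest",
)
print(results.column_names)  # inspect the available fields before use
print(results[0])            # first aggregated row of scores
```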
open-llm-leaderboard/details_lmsys__vicuna-7b-v1.5-16k
[ "region:us" ]
2023-08-18T17:49:03+00:00
{"pretty_name": "Evaluation run of lmsys/vicuna-7b-v1.5-16k", "dataset_summary": "Dataset automatically created during the evaluation run of model [lmsys/vicuna-7b-v1.5-16k](https://huggingface.co/lmsys/vicuna-7b-v1.5-16k) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_lmsys__vicuna-7b-v1.5-16k\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-21T18:19:50.042065](https://huggingface.co/datasets/open-llm-leaderboard/details_lmsys__vicuna-7b-v1.5-16k/blob/main/results_2023-10-21T18-19-50.042065.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.008913590604026845,\n \"em_stderr\": 0.0009625465757536675,\n \"f1\": 0.07239303691275177,\n \"f1_stderr\": 0.001693138759317829,\n \"acc\": 0.38701199696261246,\n \"acc_stderr\": 0.00973738194322199\n },\n \"harness|drop|3\": {\n \"em\": 0.008913590604026845,\n \"em_stderr\": 0.0009625465757536675,\n \"f1\": 0.07239303691275177,\n \"f1_stderr\": 0.001693138759317829\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.06368460955269144,\n \"acc_stderr\": 0.006726213078805721\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7103393843725335,\n \"acc_stderr\": 0.012748550807638257\n }\n}\n```", "repo_url": "https://huggingface.co/lmsys/vicuna-7b-v1.5-16k", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|arc:challenge|25_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_19T11_48_14.833652", "path": ["**/details_harness|drop|3_2023-10-19T11-48-14.833652.parquet"]}, {"split": "2023_10_21T18_19_50.042065", "path": ["**/details_harness|drop|3_2023-10-21T18-19-50.042065.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-21T18-19-50.042065.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_19T11_48_14.833652", "path": ["**/details_harness|gsm8k|5_2023-10-19T11-48-14.833652.parquet"]}, {"split": "2023_10_21T18_19_50.042065", "path": ["**/details_harness|gsm8k|5_2023-10-21T18-19-50.042065.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-21T18-19-50.042065.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": 
["**/details_harness|hellaswag|10_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T07:58:23.659880.parquet", 
"**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T07:58:23.659880.parquet", 
"**/details_harness|hendrycksTest-global_facts|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T07:58:23.659880.parquet", 
"**/details_harness|hendrycksTest-virology|5_2023-08-18T07:58:23.659880.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T07:58:23.659880.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T07:58:23.659880.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T07:58:23.659880.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_19T11_48_14.833652", "path": ["**/details_harness|winogrande|5_2023-10-19T11-48-14.833652.parquet"]}, {"split": "2023_10_21T18_19_50.042065", "path": ["**/details_harness|winogrande|5_2023-10-21T18-19-50.042065.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-21T18-19-50.042065.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T07_58_23.659880", "path": ["results_2023-08-18T07:58:23.659880.parquet"]}, {"split": "2023_10_19T11_48_14.833652", "path": ["results_2023-10-19T11-48-14.833652.parquet"]}, {"split": "2023_10_21T18_19_50.042065", "path": ["results_2023-10-21T18-19-50.042065.parquet"]}, {"split": "latest", "path": ["results_2023-10-21T18-19-50.042065.parquet"]}]}]}
2023-10-21T17:20:03+00:00
# Dataset Card for Evaluation run of lmsys/vicuna-7b-v1.5-16k

## Dataset Description

- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model lmsys/vicuna-7b-v1.5-16k on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following (see the sketch below):

## Latest results

These are the latest results from run 2023-10-21T18:19:50.042065 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
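A minimal sketch of the loading step referenced above. The repository id is an assumption inferred from the `details_<org>__<model>` naming pattern used by the other evaluation-details repos in this collection; swap in the actual repo id if it differs.

```python
from datasets import load_dataset

# Assumed repository id, following the details_<org>__<model> naming
# pattern; "harness_winogrande_5" is one of the 64 task configurations.
data = load_dataset(
    "open-llm-leaderboard/details_lmsys__vicuna-7b-v1.5-16k",
    "harness_winogrande_5",
    split="train",  # "train" always points to the latest results
)
```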
[ "# Dataset Card for Evaluation run of lmsys/vicuna-7b-v1.5-16k", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-7b-v1.5-16k on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-21T18:19:50.042065(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of lmsys/vicuna-7b-v1.5-16k", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-7b-v1.5-16k on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-21T18:19:50.042065(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 170, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of lmsys/vicuna-7b-v1.5-16k## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model lmsys/vicuna-7b-v1.5-16k on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-21T18:19:50.042065(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
7d4948077b5e4639ff064b13bda83edd8b7bd24c
# Dataset Card for Evaluation run of The-Face-Of-Goonery/Huginn-22b-Prototype

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/The-Face-Of-Goonery/Huginn-22b-Prototype
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [The-Face-Of-Goonery/Huginn-22b-Prototype](https://huggingface.co/The-Face-Of-Goonery/Huginn-22b-Prototype) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_The-Face-Of-Goonery__Huginn-22b-Prototype",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-15T14:13:40.771756](https://huggingface.co/datasets/open-llm-leaderboard/details_The-Face-Of-Goonery__Huginn-22b-Prototype/blob/main/results_2023-10-15T14-13-40.771756.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.046665268456375836,
        "em_stderr": 0.0021600273157654512,
        "f1": 0.11504928691275146,
        "f1_stderr": 0.0025720161293884478,
        "acc": 0.36930437483133105,
        "acc_stderr": 0.008391006712261204
    },
    "harness|drop|3": {
        "em": 0.046665268456375836,
        "em_stderr": 0.0021600273157654512,
        "f1": 0.11504928691275146,
        "f1_stderr": 0.0025720161293884478
    },
    "harness|gsm8k|5": {
        "acc": 0.022744503411675512,
        "acc_stderr": 0.0041066206377496795
    },
    "harness|winogrande|5": {
        "acc": 0.7158642462509865,
        "acc_stderr": 0.01267539278677273
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
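As a follow-up to the snippet above, a minimal sketch of reading the aggregated metrics, assuming the "results" configuration loads like any task configuration and that its "latest" split points at the most recent run (as the config listing below declares).

```python
from datasets import load_dataset

# The "results" configuration stores one row of aggregated metrics per
# evaluation run; the "latest" split points at the most recent run.
results = load_dataset(
    "open-llm-leaderboard/details_The-Face-Of-Goonery__Huginn-22b-Prototype",
    "results",
    split="latest",
)
print(results.column_names)  # inspect which aggregated metrics are stored
```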
open-llm-leaderboard/details_The-Face-Of-Goonery__Huginn-22b-Prototype
[ "region:us" ]
2023-08-18T17:49:13+00:00
{"pretty_name": "Evaluation run of The-Face-Of-Goonery/Huginn-22b-Prototype", "dataset_summary": "Dataset automatically created during the evaluation run of model [The-Face-Of-Goonery/Huginn-22b-Prototype](https://huggingface.co/The-Face-Of-Goonery/Huginn-22b-Prototype) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_The-Face-Of-Goonery__Huginn-22b-Prototype\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-15T14:13:40.771756](https://huggingface.co/datasets/open-llm-leaderboard/details_The-Face-Of-Goonery__Huginn-22b-Prototype/blob/main/results_2023-10-15T14-13-40.771756.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.046665268456375836,\n \"em_stderr\": 0.0021600273157654512,\n \"f1\": 0.11504928691275146,\n \"f1_stderr\": 0.0025720161293884478,\n \"acc\": 0.36930437483133105,\n \"acc_stderr\": 0.008391006712261204\n },\n \"harness|drop|3\": {\n \"em\": 0.046665268456375836,\n \"em_stderr\": 0.0021600273157654512,\n \"f1\": 0.11504928691275146,\n \"f1_stderr\": 0.0025720161293884478\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.022744503411675512,\n \"acc_stderr\": 0.0041066206377496795\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7158642462509865,\n \"acc_stderr\": 0.01267539278677273\n }\n}\n```", "repo_url": "https://huggingface.co/The-Face-Of-Goonery/Huginn-22b-Prototype", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|arc:challenge|25_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_15T14_13_40.771756", "path": ["**/details_harness|drop|3_2023-10-15T14-13-40.771756.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-15T14-13-40.771756.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_15T14_13_40.771756", "path": ["**/details_harness|gsm8k|5_2023-10-15T14-13-40.771756.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-15T14-13-40.771756.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hellaswag|10_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hellaswag|10_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T17:52:21.766212.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T17:52:21.766212.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T17:52:21.766212.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T17:52:21.766212.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T17:52:21.766212.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T17:52:21.766212.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_15T14_13_40.771756", "path": ["**/details_harness|winogrande|5_2023-10-15T14-13-40.771756.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-15T14-13-40.771756.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T17_52_21.766212", "path": ["results_2023-08-17T17:52:21.766212.parquet"]}, {"split": "2023_10_15T14_13_40.771756", "path": ["results_2023-10-15T14-13-40.771756.parquet"]}, {"split": "latest", "path": ["results_2023-10-15T14-13-40.771756.parquet"]}]}]}
2023-10-15T13:13:53+00:00
bbc9b099d482c62a9693dbcbb020537864396474
# Dataset Card for Evaluation run of facebook/opt-1.3b

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/facebook/opt-1.3b
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [facebook/opt-1.3b](https://huggingface.co/facebook/opt-1.3b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_facebook__opt-1.3b",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-19T03:17:25.770385](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__opt-1.3b/blob/main/results_2023-10-19T03-17-25.770385.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.0017827181208053692,
        "em_stderr": 0.0004320097346038933,
        "f1": 0.05017722315436251,
        "f1_stderr": 0.0012387308214165103,
        "acc": 0.2994953245415047,
        "acc_stderr": 0.0074273230901261535
    },
    "harness|drop|3": {
        "em": 0.0017827181208053692,
        "em_stderr": 0.0004320097346038933,
        "f1": 0.05017722315436251,
        "f1_stderr": 0.0012387308214165103
    },
    "harness|gsm8k|5": {
        "acc": 0.001516300227445034,
        "acc_stderr": 0.0010717793485492619
    },
    "harness|winogrande|5": {
        "acc": 0.5974743488555643,
        "acc_stderr": 0.013782866831703044
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
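As a complement to the snippet above, here is a minimal sketch for pulling the aggregated metrics rather than per-example details. It assumes only that the `datasets` library is installed; the config name `"results"` and the `"latest"` split alias are taken from the description and metadata of this card.

```python
from datasets import load_dataset

# The "results" configuration stores the aggregated metrics of each run;
# the "latest" split always resolves to the most recent results file
# (here results_2023-10-19T03-17-25.770385.parquet).
results = load_dataset(
    "open-llm-leaderboard/details_facebook__opt-1.3b",
    "results",
    split="latest",
)
print(results[0])  # aggregated metrics of the latest evaluation run
```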
open-llm-leaderboard/details_facebook__opt-1.3b
[ "region:us" ]
2023-08-18T17:49:26+00:00
{"pretty_name": "Evaluation run of facebook/opt-1.3b", "dataset_summary": "Dataset automatically created during the evaluation run of model [facebook/opt-1.3b](https://huggingface.co/facebook/opt-1.3b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_facebook__opt-1.3b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-19T03:17:25.770385](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__opt-1.3b/blob/main/results_2023-10-19T03-17-25.770385.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0017827181208053692,\n \"em_stderr\": 0.0004320097346038933,\n \"f1\": 0.05017722315436251,\n \"f1_stderr\": 0.0012387308214165103,\n \"acc\": 0.2994953245415047,\n \"acc_stderr\": 0.0074273230901261535\n },\n \"harness|drop|3\": {\n \"em\": 0.0017827181208053692,\n \"em_stderr\": 0.0004320097346038933,\n \"f1\": 0.05017722315436251,\n \"f1_stderr\": 0.0012387308214165103\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.001516300227445034,\n \"acc_stderr\": 0.0010717793485492619\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5974743488555643,\n \"acc_stderr\": 0.013782866831703044\n }\n}\n```", "repo_url": "https://huggingface.co/facebook/opt-1.3b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|arc:challenge|25_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_19T03_17_25.770385", "path": ["**/details_harness|drop|3_2023-10-19T03-17-25.770385.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-19T03-17-25.770385.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_19T03_17_25.770385", "path": ["**/details_harness|gsm8k|5_2023-10-19T03-17-25.770385.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-19T03-17-25.770385.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hellaswag|10_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T14:50:30.777525.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T14:50:30.777525.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T14:50:30.777525.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T14:50:30.777525.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T14:50:30.777525.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T14:50:30.777525.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_19T03_17_25.770385", "path": ["**/details_harness|winogrande|5_2023-10-19T03-17-25.770385.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-19T03-17-25.770385.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T14_50_30.777525", "path": ["results_2023-08-18T14:50:30.777525.parquet"]}, {"split": "2023_10_19T03_17_25.770385", "path": ["results_2023-10-19T03-17-25.770385.parquet"]}, {"split": "latest", "path": ["results_2023-10-19T03-17-25.770385.parquet"]}]}]}
2023-10-19T02:17:38+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of facebook/opt-1.3b ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model facebook/opt-1.3b on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-19T03:17:25.770385 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
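The loading snippet that the paragraph above refers to is stripped from this plain-text rendering along with the other code fences. As a hedged sketch of how the 64 configurations mentioned in the summary can be discovered programmatically (assuming only the `datasets` library):

```python
from datasets import get_dataset_config_names

# The card states the dataset is composed of 64 configurations:
# one per evaluated task, plus the aggregated "results" configuration.
configs = get_dataset_config_names("open-llm-leaderboard/details_facebook__opt-1.3b")
print(len(configs))  # expected to be 64, per the card's description
print(configs[:5])
```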
[ "# Dataset Card for Evaluation run of facebook/opt-1.3b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/opt-1.3b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-19T03:17:25.770385(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of facebook/opt-1.3b", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/opt-1.3b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-19T03:17:25.770385(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 16, 31, 164, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of facebook/opt-1.3b## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/opt-1.3b on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-19T03:17:25.770385(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
fb6b4e3f7bd3c6afbef27f85338c5db7ddc01ee7
# Dataset Card for Evaluation run of facebook/xglm-1.7B

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/facebook/xglm-1.7B
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [facebook/xglm-1.7B](https://huggingface.co/facebook/xglm-1.7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_facebook__xglm-1.7B",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-18T12:55:05.942892](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__xglm-1.7B/blob/main/results_2023-10-18T12-55-05.942892.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.06554110738255034,
        "em_stderr": 0.002534408633945897,
        "f1": 0.11142932046979848,
        "f1_stderr": 0.002755989095682693,
        "acc": 0.27332508363885727,
        "acc_stderr": 0.008199401596528776
    },
    "harness|drop|3": {
        "em": 0.06554110738255034,
        "em_stderr": 0.002534408633945897,
        "f1": 0.11142932046979848,
        "f1_stderr": 0.002755989095682693
    },
    "harness|gsm8k|5": {
        "acc": 0.0075815011372251705,
        "acc_stderr": 0.0023892815120772396
    },
    "harness|winogrande|5": {
        "acc": 0.5390686661404893,
        "acc_stderr": 0.014009521680980314
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
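Beyond the `load_dataset` call shown above, here is a minimal sketch (assuming the `datasets` library; the config name `harness_gsm8k_5` comes from this card's metadata) for listing the timestamped runs available for one task and then loading the newest one:

```python
from datasets import get_dataset_split_names, load_dataset

repo = "open-llm-leaderboard/details_facebook__xglm-1.7B"

# Each run is stored as a timestamped split; "latest" aliases the newest one.
print(get_dataset_split_names(repo, "harness_gsm8k_5"))
# e.g. ['2023_10_18T12_55_05.942892', 'latest']

gsm8k_details = load_dataset(repo, "harness_gsm8k_5", split="latest")
print(gsm8k_details)
```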
open-llm-leaderboard/details_facebook__xglm-1.7B
[ "region:us" ]
2023-08-18T17:51:44+00:00
{"pretty_name": "Evaluation run of facebook/xglm-1.7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [facebook/xglm-1.7B](https://huggingface.co/facebook/xglm-1.7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_facebook__xglm-1.7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-18T12:55:05.942892](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__xglm-1.7B/blob/main/results_2023-10-18T12-55-05.942892.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.06554110738255034,\n \"em_stderr\": 0.002534408633945897,\n \"f1\": 0.11142932046979848,\n \"f1_stderr\": 0.002755989095682693,\n \"acc\": 0.27332508363885727,\n \"acc_stderr\": 0.008199401596528776\n },\n \"harness|drop|3\": {\n \"em\": 0.06554110738255034,\n \"em_stderr\": 0.002534408633945897,\n \"f1\": 0.11142932046979848,\n \"f1_stderr\": 0.002755989095682693\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0075815011372251705,\n \"acc_stderr\": 0.0023892815120772396\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5390686661404893,\n \"acc_stderr\": 0.014009521680980314\n }\n}\n```", "repo_url": "https://huggingface.co/facebook/xglm-1.7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|arc:challenge|25_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_18T12_55_05.942892", "path": ["**/details_harness|drop|3_2023-10-18T12-55-05.942892.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-18T12-55-05.942892.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_18T12_55_05.942892", "path": ["**/details_harness|gsm8k|5_2023-10-18T12-55-05.942892.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-18T12-55-05.942892.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hellaswag|10_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T06:53:01.114817.parquet", 
"**/details_harness|hendrycksTest-management|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T06:53:01.114817.parquet", 
"**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T06:53:01.114817.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T06:53:01.114817.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": 
"2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T06:53:01.114817.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T06:53:01.114817.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_18T12_55_05.942892", "path": ["**/details_harness|winogrande|5_2023-10-18T12-55-05.942892.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-18T12-55-05.942892.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T06_53_01.114817", "path": ["results_2023-08-18T06:53:01.114817.parquet"]}, {"split": "2023_10_18T12_55_05.942892", "path": ["results_2023-10-18T12-55-05.942892.parquet"]}, {"split": "latest", "path": ["results_2023-10-18T12-55-05.942892.parquet"]}]}]}
2023-10-18T11:55:18+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of facebook/xglm-1.7B ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model facebook/xglm-1.7B on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-18T12:55:05.942892 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
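The Python snippet referenced by "To load the details from a run" above was stripped when this card text was flattened; restored from this record's dataset metadata, it reads:

```python
from datasets import load_dataset

# Per-example details for one task of one evaluation run
data = load_dataset("open-llm-leaderboard/details_facebook__xglm-1.7B",
                    "harness_winogrande_5",
                    split="train")
```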
[ "# Dataset Card for Evaluation run of facebook/xglm-1.7B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/xglm-1.7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T12:55:05.942892(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of facebook/xglm-1.7B", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/xglm-1.7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T12:55:05.942892(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 17, 31, 165, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of facebook/xglm-1.7B## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model facebook/xglm-1.7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-18T12:55:05.942892(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
ff3d4adcaa00562fa3e4953612e03250184c24f4
# Dataset Card for Evaluation run of Harshvir/Llama-2-7B-physics ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Harshvir/Llama-2-7B-physics - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [Harshvir/Llama-2-7B-physics](https://huggingface.co/Harshvir/Llama-2-7B-physics) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Harshvir__Llama-2-7B-physics", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-17T20:39:36.366627](https://huggingface.co/datasets/open-llm-leaderboard/details_Harshvir__Llama-2-7B-physics/blob/main/results_2023-09-17T20-39-36.366627.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ```python { "all": { "em": 0.03680788590604027, "em_stderr": 0.0019282642409219751, "f1": 0.10780620805369148, "f1_stderr": 0.0024191974799882767, "acc": 0.39476463537886264, "acc_stderr": 0.009842042454929716 }, "harness|drop|3": { "em": 0.03680788590604027, "em_stderr": 0.0019282642409219751, "f1": 0.10780620805369148, "f1_stderr": 0.0024191974799882767 }, "harness|gsm8k|5": { "acc": 0.07050796057619409, "acc_stderr": 0.007051543813983609 }, "harness|winogrande|5": { "acc": 0.7190213101815311, "acc_stderr": 0.012632541095875824 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
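Beyond the winogrande example in the card above, each evaluated task has its own config; as a minimal sketch using a config name that appears in this record's metadata (the MMLU abstract-algebra subtask), per-task details can be loaded via the "latest" split alias:

```python
from datasets import load_dataset

# Details for a single MMLU subtask; "latest" aliases the newest run's split.
details = load_dataset("open-llm-leaderboard/details_Harshvir__Llama-2-7B-physics",
                       "harness_hendrycksTest_abstract_algebra_5",
                       split="latest")
```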
open-llm-leaderboard/details_Harshvir__Llama-2-7B-physics
[ "region:us" ]
2023-08-18T17:51:53+00:00
{"pretty_name": "Evaluation run of Harshvir/Llama-2-7B-physics", "dataset_summary": "Dataset automatically created during the evaluation run of model [Harshvir/Llama-2-7B-physics](https://huggingface.co/Harshvir/Llama-2-7B-physics) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Harshvir__Llama-2-7B-physics\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-17T20:39:36.366627](https://huggingface.co/datasets/open-llm-leaderboard/details_Harshvir__Llama-2-7B-physics/blob/main/results_2023-09-17T20-39-36.366627.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.03680788590604027,\n \"em_stderr\": 0.0019282642409219751,\n \"f1\": 0.10780620805369148,\n \"f1_stderr\": 0.0024191974799882767,\n \"acc\": 0.39476463537886264,\n \"acc_stderr\": 0.009842042454929716\n },\n \"harness|drop|3\": {\n \"em\": 0.03680788590604027,\n \"em_stderr\": 0.0019282642409219751,\n \"f1\": 0.10780620805369148,\n \"f1_stderr\": 0.0024191974799882767\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.07050796057619409,\n \"acc_stderr\": 0.007051543813983609\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7190213101815311,\n \"acc_stderr\": 0.012632541095875824\n }\n}\n```", "repo_url": "https://huggingface.co/Harshvir/Llama-2-7B-physics", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|arc:challenge|25_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_17T20_39_36.366627", "path": ["**/details_harness|drop|3_2023-09-17T20-39-36.366627.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-17T20-39-36.366627.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_17T20_39_36.366627", "path": ["**/details_harness|gsm8k|5_2023-09-17T20-39-36.366627.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-17T20-39-36.366627.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hellaswag|10_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", 
"data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T21:02:56.107134.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T21:02:56.107134.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T21:02:56.107134.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T21:02:56.107134.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T21:02:56.107134.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_17T20_39_36.366627", "path": ["**/details_harness|winogrande|5_2023-09-17T20-39-36.366627.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-17T20-39-36.366627.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T21_02_56.107134", "path": ["results_2023-08-17T21:02:56.107134.parquet"]}, {"split": "2023_09_17T20_39_36.366627", "path": ["results_2023-09-17T20-39-36.366627.parquet"]}, {"split": "latest", "path": ["results_2023-09-17T20-39-36.366627.parquet"]}]}]}
2023-09-17T19:39:48+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Harshvir/Llama-2-7B-physics ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model Harshvir/Llama-2-7B-physics on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the most recent results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-17T20:39:36.366627 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
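The flattened card above ends its summary with "you can for instance do the following:" but the accompanying snippet was stripped in this representation. A minimal sketch of that load is given below; the repository id is an assumption inferred from the card title and the leaderboard's usual `details_<org>__<model>` naming (it is not stated in this record), while `harness_winogrande_5` and the `latest` split are taken from this record's metadata:

```python
# Sketch only: the repo id below is inferred, not confirmed by this record.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_Harshvir__Llama-2-7B-physics",  # assumed repo id
    "harness_winogrande_5",  # config name listed in this record's metadata
    split="latest",          # "latest" always resolves to the most recent run
)
print(data)
```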
[ "# Dataset Card for Evaluation run of Harshvir/Llama-2-7B-physics", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Harshvir/Llama-2-7B-physics on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T20:39:36.366627(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Harshvir/Llama-2-7B-physics", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model Harshvir/Llama-2-7B-physics on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-17T20:39:36.366627(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 170, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Harshvir/Llama-2-7B-physics## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model Harshvir/Llama-2-7B-physics on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-17T20:39:36.366627(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
f9bf6c5488b3769da36eab8b2561bd5d8bc1c7e2
# Dataset Card for Evaluation run of TheTravellingEngineer/llama2-7b-chat-hf-v3 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/TheTravellingEngineer/llama2-7b-chat-hf-v3 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [TheTravellingEngineer/llama2-7b-chat-hf-v3](https://huggingface.co/TheTravellingEngineer/llama2-7b-chat-hf-v3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the most recent results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TheTravellingEngineer__llama2-7b-chat-hf-v3", "harness_winogrande_5", split="latest") ``` ## Latest results These are the [latest results from run 2023-10-21T22:05:13.149826](https://huggingface.co/datasets/open-llm-leaderboard/details_TheTravellingEngineer__llama2-7b-chat-hf-v3/blob/main/results_2023-10-21T22-05-13.149826.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```json { "all": { "em": 0.0016778523489932886, "em_stderr": 0.00041913301788269335, "f1": 0.057038590604026815, "f1_stderr": 0.0013245352325167287, "acc": 0.3977448774004846, "acc_stderr": 0.009465436524136249 }, "harness|drop|3": { "em": 0.0016778523489932886, "em_stderr": 0.00041913301788269335, "f1": 0.057038590604026815, "f1_stderr": 0.0013245352325167287 }, "harness|gsm8k|5": { "acc": 0.05989385898407885, "acc_stderr": 0.006536148151288697 }, "harness|winogrande|5": { "acc": 0.7355958958168903, "acc_stderr": 0.012394724896983799 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
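As a complement to the card's own loading example, the aggregated metrics shown under "Latest results" can be pulled from the "results" configuration, whose name and "latest" split are confirmed by this record's metadata; the pandas conversion below is just one convenient way to inspect them and is not part of the original card:

```python
# Load the aggregated metrics for the most recent run of this model.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_TheTravellingEngineer__llama2-7b-chat-hf-v3",
    "results",       # aggregated-results config defined in the metadata
    split="latest",  # resolves to results_2023-10-21T22-05-13.149826.parquet
)
df = results.to_pandas()  # one row per stored results record
print(df.head())
```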
open-llm-leaderboard/details_TheTravellingEngineer__llama2-7b-chat-hf-v3
[ "region:us" ]
2023-08-18T17:52:05+00:00
{"pretty_name": "Evaluation run of TheTravellingEngineer/llama2-7b-chat-hf-v3", "dataset_summary": "Dataset automatically created during the evaluation run of model [TheTravellingEngineer/llama2-7b-chat-hf-v3](https://huggingface.co/TheTravellingEngineer/llama2-7b-chat-hf-v3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TheTravellingEngineer__llama2-7b-chat-hf-v3\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-21T22:05:13.149826](https://huggingface.co/datasets/open-llm-leaderboard/details_TheTravellingEngineer__llama2-7b-chat-hf-v3/blob/main/results_2023-10-21T22-05-13.149826.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0016778523489932886,\n \"em_stderr\": 0.00041913301788269335,\n \"f1\": 0.057038590604026815,\n \"f1_stderr\": 0.0013245352325167287,\n \"acc\": 0.3977448774004846,\n \"acc_stderr\": 0.009465436524136249\n },\n \"harness|drop|3\": {\n \"em\": 0.0016778523489932886,\n \"em_stderr\": 0.00041913301788269335,\n \"f1\": 0.057038590604026815,\n \"f1_stderr\": 0.0013245352325167287\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.05989385898407885,\n \"acc_stderr\": 0.006536148151288697\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7355958958168903,\n \"acc_stderr\": 0.012394724896983799\n }\n}\n```", "repo_url": "https://huggingface.co/TheTravellingEngineer/llama2-7b-chat-hf-v3", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|arc:challenge|25_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_16T19_25_46.830235", "path": ["**/details_harness|drop|3_2023-10-16T19-25-46.830235.parquet"]}, {"split": "2023_10_21T22_05_13.149826", "path": ["**/details_harness|drop|3_2023-10-21T22-05-13.149826.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-21T22-05-13.149826.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_16T19_25_46.830235", "path": ["**/details_harness|gsm8k|5_2023-10-16T19-25-46.830235.parquet"]}, {"split": "2023_10_21T22_05_13.149826", "path": ["**/details_harness|gsm8k|5_2023-10-21T22-05-13.149826.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-21T22-05-13.149826.parquet"]}]}, 
{"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hellaswag|10_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:37:31.585910.parquet", 
"**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:37:31.585910.parquet", 
"**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:37:31.585910.parquet", 
"**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T18:37:31.585910.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": 
[{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T18:37:31.585910.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T18:37:31.585910.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_16T19_25_46.830235", "path": ["**/details_harness|winogrande|5_2023-10-16T19-25-46.830235.parquet"]}, {"split": "2023_10_21T22_05_13.149826", "path": ["**/details_harness|winogrande|5_2023-10-21T22-05-13.149826.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-21T22-05-13.149826.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T18_37_31.585910", "path": ["results_2023-08-17T18:37:31.585910.parquet"]}, {"split": "2023_10_16T19_25_46.830235", "path": ["results_2023-10-16T19-25-46.830235.parquet"]}, {"split": "2023_10_21T22_05_13.149826", "path": ["results_2023-10-21T22-05-13.149826.parquet"]}, {"split": "latest", "path": ["results_2023-10-21T22-05-13.149826.parquet"]}]}]}
2023-10-21T21:05:26+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of TheTravellingEngineer/llama2-7b-chat-hf-v3 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model TheTravellingEngineer/llama2-7b-chat-hf-v3 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the sketch after this card): ## Latest results These are the latest results from run 2023-10-21T22:05:13.149826 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
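A minimal loading sketch for the card above, whose original snippet was dropped when the text was flattened. The repository id is an assumption inferred from the card title and the naming pattern of the sibling evaluation datasets in this dump; verify it on the Hub before use.

```python
from datasets import load_dataset

# Hypothetical repository id, inferred from the card title above; verify on the Hub.
data = load_dataset(
    "open-llm-leaderboard/details_TheTravellingEngineer__llama2-7b-chat-hf-v3",
    "harness_winogrande_5",  # one of the 64 task configurations
    split="train",           # per the card, "train" points to the latest results
)
```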
[ "# Dataset Card for Evaluation run of TheTravellingEngineer/llama2-7b-chat-hf-v3", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model TheTravellingEngineer/llama2-7b-chat-hf-v3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-21T22:05:13.149826(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of TheTravellingEngineer/llama2-7b-chat-hf-v3", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model TheTravellingEngineer/llama2-7b-chat-hf-v3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-21T22:05:13.149826(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 29, 31, 177, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of TheTravellingEngineer/llama2-7b-chat-hf-v3## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model TheTravellingEngineer/llama2-7b-chat-hf-v3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-21T22:05:13.149826(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
f18af835f63ab32866442ea4ef5d14ca76861ce2
# Dataset Card for Evaluation run of jondurbin/airoboros-33b-gpt4-1.3 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/jondurbin/airoboros-33b-gpt4-1.3 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [jondurbin/airoboros-33b-gpt4-1.3](https://huggingface.co/jondurbin/airoboros-33b-gpt4-1.3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jondurbin__airoboros-33b-gpt4-1.3", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-22T21:52:38.405069](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-33b-gpt4-1.3/blob/main/results_2023-10-22T21-52-38.405069.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.01950503355704698, "em_stderr": 0.0014162361849700588, "f1": 0.08725671140939598, "f1_stderr": 0.0019162183364708429, "acc": 0.45865000212425644, "acc_stderr": 0.010392294007638424 }, "harness|drop|3": { "em": 0.01950503355704698, "em_stderr": 0.0014162361849700588, "f1": 0.08725671140939598, "f1_stderr": 0.0019162183364708429 }, "harness|gsm8k|5": { "acc": 0.13040181956027294, "acc_stderr": 0.009275630324554088 }, "harness|winogrande|5": { "acc": 0.7868981846882399, "acc_stderr": 0.01150895769072276 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_jondurbin__airoboros-33b-gpt4-1.3
[ "region:us" ]
2023-08-18T17:52:20+00:00
{"pretty_name": "Evaluation run of jondurbin/airoboros-33b-gpt4-1.3", "dataset_summary": "Dataset automatically created during the evaluation run of model [jondurbin/airoboros-33b-gpt4-1.3](https://huggingface.co/jondurbin/airoboros-33b-gpt4-1.3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jondurbin__airoboros-33b-gpt4-1.3\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-22T21:52:38.405069](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-33b-gpt4-1.3/blob/main/results_2023-10-22T21-52-38.405069.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.01950503355704698,\n \"em_stderr\": 0.0014162361849700588,\n \"f1\": 0.08725671140939598,\n \"f1_stderr\": 0.0019162183364708429,\n \"acc\": 0.45865000212425644,\n \"acc_stderr\": 0.010392294007638424\n },\n \"harness|drop|3\": {\n \"em\": 0.01950503355704698,\n \"em_stderr\": 0.0014162361849700588,\n \"f1\": 0.08725671140939598,\n \"f1_stderr\": 0.0019162183364708429\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.13040181956027294,\n \"acc_stderr\": 0.009275630324554088\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7868981846882399,\n \"acc_stderr\": 0.01150895769072276\n }\n}\n```", "repo_url": "https://huggingface.co/jondurbin/airoboros-33b-gpt4-1.3", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|arc:challenge|25_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|arc:challenge|25_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_18T13_47_41.719398", "path": ["**/details_harness|drop|3_2023-10-18T13-47-41.719398.parquet"]}, {"split": "2023_10_22T21_52_38.405069", "path": ["**/details_harness|drop|3_2023-10-22T21-52-38.405069.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-22T21-52-38.405069.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_18T13_47_41.719398", "path": ["**/details_harness|gsm8k|5_2023-10-18T13-47-41.719398.parquet"]}, {"split": "2023_10_22T21_52_38.405069", "path": ["**/details_harness|gsm8k|5_2023-10-22T21-52-38.405069.parquet"]}, {"split": "latest", "path": 
["**/details_harness|gsm8k|5_2023-10-22T21-52-38.405069.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hellaswag|10_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hellaswag|10_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T14:12:32.965020.parquet", 
"**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T14:12:32.965020.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T17:42:39.017472.parquet", 
"**/details_harness|hendrycksTest-econometrics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T17:42:39.017472.parquet", 
"**/details_harness|hendrycksTest-public_relations|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T17:42:39.017472.parquet", 
"**/details_harness|hendrycksTest-human_aging|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T17:42:39.017472.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", 
"data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": 
"2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": 
["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": 
["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": 
["**/details_harness|hendrycksTest-management|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": 
"2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": 
"2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_08_18T17_42_39.017472", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T17:42:39.017472.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T17:42:39.017472.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_18T13_47_41.719398", "path": ["**/details_harness|winogrande|5_2023-10-18T13-47-41.719398.parquet"]}, {"split": "2023_10_22T21_52_38.405069", "path": ["**/details_harness|winogrande|5_2023-10-22T21-52-38.405069.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-22T21-52-38.405069.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T14_12_32.965020", "path": ["results_2023-08-18T14:12:32.965020.parquet"]}, {"split": "2023_10_18T13_47_41.719398", "path": ["results_2023-10-18T13-47-41.719398.parquet"]}, {"split": "2023_10_22T21_52_38.405069", "path": ["results_2023-10-22T21-52-38.405069.parquet"]}, {"split": "latest", "path": ["results_2023-10-22T21-52-38.405069.parquet"]}]}]}
2023-10-22T20:52:51+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of jondurbin/airoboros-33b-gpt4-1.3 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model jondurbin/airoboros-33b-gpt4-1.3 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the sketch after this card): ## Latest results These are the latest results from run 2023-10-22T21:52:38.405069 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
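A minimal loading sketch for this card, mirroring the snippet used by the other details repositories in this dump; the repository name (open-llm-leaderboard/details_jondurbin__airoboros-33b-gpt4-1.3) is inferred from the leaderboard's naming convention rather than stated in this copy of the card, and "harness_winogrande_5" is one of the configs listed in this record:

```python
from datasets import load_dataset

# Repository name inferred from the leaderboard's details-dataset naming
# convention; "harness_winogrande_5" is one of the configs listed above.
data = load_dataset(
    "open-llm-leaderboard/details_jondurbin__airoboros-33b-gpt4-1.3",
    "harness_winogrande_5",
    split="train",
)
```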
[ "# Dataset Card for Evaluation run of jondurbin/airoboros-33b-gpt4-1.3", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-33b-gpt4-1.3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-22T21:52:38.405069(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of jondurbin/airoboros-33b-gpt4-1.3", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-33b-gpt4-1.3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-22T21:52:38.405069(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 23, 31, 171, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of jondurbin/airoboros-33b-gpt4-1.3## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-33b-gpt4-1.3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 4 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-22T21:52:38.405069(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
191d7efc2cbd682c0053e9b9a37d0972f03014a9
# Dataset Card for Evaluation run of jondurbin/airoboros-l2-7b-gpt4-m2.0 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/jondurbin/airoboros-l2-7b-gpt4-m2.0 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [jondurbin/airoboros-l2-7b-gpt4-m2.0](https://huggingface.co/jondurbin/airoboros-l2-7b-gpt4-m2.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jondurbin__airoboros-l2-7b-gpt4-m2.0", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-18T20:05:57.910651](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-l2-7b-gpt4-m2.0/blob/main/results_2023-10-18T20-05-57.910651.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.16799496644295303, "em_stderr": 0.0038286949270672057, "f1": 0.24476510067114088, "f1_stderr": 0.003911929321827723, "acc": 0.3681417184217313, "acc_stderr": 0.009196861647809822 }, "harness|drop|3": { "em": 0.16799496644295303, "em_stderr": 0.0038286949270672057, "f1": 0.24476510067114088, "f1_stderr": 0.003911929321827723 }, "harness|gsm8k|5": { "acc": 0.04094010614101592, "acc_stderr": 0.005458076796294343 }, "harness|winogrande|5": { "acc": 0.6953433307024467, "acc_stderr": 0.012935646499325302 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_jondurbin__airoboros-l2-7b-gpt4-m2.0
[ "region:us" ]
2023-08-18T17:52:29+00:00
{"pretty_name": "Evaluation run of jondurbin/airoboros-l2-7b-gpt4-m2.0", "dataset_summary": "Dataset automatically created during the evaluation run of model [jondurbin/airoboros-l2-7b-gpt4-m2.0](https://huggingface.co/jondurbin/airoboros-l2-7b-gpt4-m2.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jondurbin__airoboros-l2-7b-gpt4-m2.0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-18T20:05:57.910651](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-l2-7b-gpt4-m2.0/blob/main/results_2023-10-18T20-05-57.910651.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.16799496644295303,\n \"em_stderr\": 0.0038286949270672057,\n \"f1\": 0.24476510067114088,\n \"f1_stderr\": 0.003911929321827723,\n \"acc\": 0.3681417184217313,\n \"acc_stderr\": 0.009196861647809822\n },\n \"harness|drop|3\": {\n \"em\": 0.16799496644295303,\n \"em_stderr\": 0.0038286949270672057,\n \"f1\": 0.24476510067114088,\n \"f1_stderr\": 0.003911929321827723\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.04094010614101592,\n \"acc_stderr\": 0.005458076796294343\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6953433307024467,\n \"acc_stderr\": 0.012935646499325302\n }\n}\n```", "repo_url": "https://huggingface.co/jondurbin/airoboros-l2-7b-gpt4-m2.0", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|arc:challenge|25_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_18T20_05_57.910651", "path": ["**/details_harness|drop|3_2023-10-18T20-05-57.910651.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-18T20-05-57.910651.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_18T20_05_57.910651", "path": ["**/details_harness|gsm8k|5_2023-10-18T20-05-57.910651.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-18T20-05-57.910651.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hellaswag|10_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T12:14:57.901258.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T12:14:57.901258.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T12:14:57.901258.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T12:14:57.901258.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T12:14:57.901258.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T12:14:57.901258.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_18T20_05_57.910651", "path": ["**/details_harness|winogrande|5_2023-10-18T20-05-57.910651.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-18T20-05-57.910651.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T12_14_57.901258", "path": ["results_2023-08-18T12:14:57.901258.parquet"]}, {"split": "2023_10_18T20_05_57.910651", "path": ["results_2023-10-18T20-05-57.910651.parquet"]}, {"split": "latest", "path": ["results_2023-10-18T20-05-57.910651.parquet"]}]}]}
2023-10-18T19:06:10+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of jondurbin/airoboros-l2-7b-gpt4-m2.0 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model jondurbin/airoboros-l2-7b-gpt4-m2.0 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the snippet after this card): ## Latest results These are the latest results from run 2023-10-18T20:05:57.910651 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
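The loading snippet for this run, taken from the full card earlier in this record (the datasets-library call is the one the card itself shows):

```python
from datasets import load_dataset

# Load the "train" split of the winogrande details config for this run,
# exactly as shown in the full card for this repository.
data = load_dataset(
    "open-llm-leaderboard/details_jondurbin__airoboros-l2-7b-gpt4-m2.0",
    "harness_winogrande_5",
    split="train",
)
```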
[ "# Dataset Card for Evaluation run of jondurbin/airoboros-l2-7b-gpt4-m2.0", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-l2-7b-gpt4-m2.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T20:05:57.910651(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of jondurbin/airoboros-l2-7b-gpt4-m2.0", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-l2-7b-gpt4-m2.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-18T20:05:57.910651(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 27, 31, 175, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of jondurbin/airoboros-l2-7b-gpt4-m2.0## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-l2-7b-gpt4-m2.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-18T20:05:57.910651(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
c6ac52fe5bc1871f671f70a05b5fd5344a2324b4
# Dataset Card for Evaluation run of jondurbin/airoboros-l2-13b-gpt4-m2.0 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/jondurbin/airoboros-l2-13b-gpt4-m2.0 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [jondurbin/airoboros-l2-13b-gpt4-m2.0](https://huggingface.co/jondurbin/airoboros-l2-13b-gpt4-m2.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-gpt4-m2.0", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-22T10:34:16.323837](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-gpt4-m2.0/blob/main/results_2023-10-22T10-34-16.323837.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.07319630872483221, "em_stderr": 0.002667338409592003, "f1": 0.1402590184563752, "f1_stderr": 0.0029073464846959135, "acc": 0.4114071373819467, "acc_stderr": 0.010061446818381128 }, "harness|drop|3": { "em": 0.07319630872483221, "em_stderr": 0.002667338409592003, "f1": 0.1402590184563752, "f1_stderr": 0.0029073464846959135 }, "harness|gsm8k|5": { "acc": 0.08642911296436695, "acc_stderr": 0.007740044337103796 }, "harness|winogrande|5": { "acc": 0.7363851617995264, "acc_stderr": 0.012382849299658463 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-gpt4-m2.0
[ "region:us" ]
2023-08-18T17:52:38+00:00
{"pretty_name": "Evaluation run of jondurbin/airoboros-l2-13b-gpt4-m2.0", "dataset_summary": "Dataset automatically created during the evaluation run of model [jondurbin/airoboros-l2-13b-gpt4-m2.0](https://huggingface.co/jondurbin/airoboros-l2-13b-gpt4-m2.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-gpt4-m2.0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-22T10:34:16.323837](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-gpt4-m2.0/blob/main/results_2023-10-22T10-34-16.323837.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.07319630872483221,\n \"em_stderr\": 0.002667338409592003,\n \"f1\": 0.1402590184563752,\n \"f1_stderr\": 0.0029073464846959135,\n \"acc\": 0.4114071373819467,\n \"acc_stderr\": 0.010061446818381128\n },\n \"harness|drop|3\": {\n \"em\": 0.07319630872483221,\n \"em_stderr\": 0.002667338409592003,\n \"f1\": 0.1402590184563752,\n \"f1_stderr\": 0.0029073464846959135\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.08642911296436695,\n \"acc_stderr\": 0.007740044337103796\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7363851617995264,\n \"acc_stderr\": 0.012382849299658463\n }\n}\n```", "repo_url": "https://huggingface.co/jondurbin/airoboros-l2-13b-gpt4-m2.0", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|arc:challenge|25_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_22T10_34_16.323837", "path": ["**/details_harness|drop|3_2023-10-22T10-34-16.323837.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-22T10-34-16.323837.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_22T10_34_16.323837", "path": ["**/details_harness|gsm8k|5_2023-10-22T10-34-16.323837.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-22T10-34-16.323837.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hellaswag|10_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T08:32:42.679525.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T08:32:42.679525.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T08:32:42.679525.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T08:32:42.679525.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T08:32:42.679525.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T08:32:42.679525.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_22T10_34_16.323837", "path": ["**/details_harness|winogrande|5_2023-10-22T10-34-16.323837.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-22T10-34-16.323837.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T08_32_42.679525", "path": ["results_2023-08-18T08:32:42.679525.parquet"]}, {"split": "2023_10_22T10_34_16.323837", "path": ["results_2023-10-22T10-34-16.323837.parquet"]}, {"split": "latest", "path": ["results_2023-10-22T10-34-16.323837.parquet"]}]}]}
2023-10-22T09:34:29+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of jondurbin/airoboros-l2-13b-gpt4-m2.0 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model jondurbin/airoboros-l2-13b-gpt4-m2.0 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-22T10:34:16.323837 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of jondurbin/airoboros-l2-13b-gpt4-m2.0", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-l2-13b-gpt4-m2.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-22T10:34:16.323837(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of jondurbin/airoboros-l2-13b-gpt4-m2.0", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-l2-13b-gpt4-m2.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-22T10:34:16.323837(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 27, 31, 175, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of jondurbin/airoboros-l2-13b-gpt4-m2.0## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-l2-13b-gpt4-m2.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-22T10:34:16.323837(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
925287ff4a360ad675a640944674dcc7e1786421
# Dataset Card for Evaluation run of jondurbin/airoboros-65b-gpt4-2.0

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/jondurbin/airoboros-65b-gpt4-2.0
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [jondurbin/airoboros-65b-gpt4-2.0](https://huggingface.co/jondurbin/airoboros-65b-gpt4-2.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_jondurbin__airoboros-65b-gpt4-2.0",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-23T00:06:27.390868](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-65b-gpt4-2.0/blob/main/results_2023-10-23T00-06-27.390868.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.057151845637583895,
        "em_stderr": 0.0023772543944610342,
        "f1": 0.12996224832214703,
        "f1_stderr": 0.002697055815084332,
        "acc": 0.5040710925798825,
        "acc_stderr": 0.011157098318797569
    },
    "harness|drop|3": {
        "em": 0.057151845637583895,
        "em_stderr": 0.0023772543944610342,
        "f1": 0.12996224832214703,
        "f1_stderr": 0.002697055815084332
    },
    "harness|gsm8k|5": {
        "acc": 0.20545868081880211,
        "acc_stderr": 0.011129170248544765
    },
    "harness|winogrande|5": {
        "acc": 0.8026835043409629,
        "acc_stderr": 0.011185026389050372
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
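As a complementary sketch (again added editorially, not part of the generated card), the run timestamps listed in this card's metadata can be used directly as split names, which makes it easy to compare two runs of the same task. Both split names below appear verbatim in the "harness_drop_3" configuration of this repo; the only assumption is that the `datasets` library is installed:

```python
# Sketch: load the same task from two different evaluation runs.
# Split names are run timestamps; "latest" is an alias for the newest run
# (2023_10_23T00_06_27.390868, per this card's metadata).
from datasets import load_dataset

repo = "open-llm-leaderboard/details_jondurbin__airoboros-65b-gpt4-2.0"

# An earlier run of the DROP task, addressed by its timestamp split...
earlier = load_dataset(repo, "harness_drop_3", split="2023_10_22T19_00_27.180103")
# ...and the most recent run, via the "latest" alias.
latest = load_dataset(repo, "harness_drop_3", split="latest")

print(len(earlier), len(latest))
```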
open-llm-leaderboard/details_jondurbin__airoboros-65b-gpt4-2.0
[ "region:us" ]
2023-08-18T17:52:47+00:00
{"pretty_name": "Evaluation run of jondurbin/airoboros-65b-gpt4-2.0", "dataset_summary": "Dataset automatically created during the evaluation run of model [jondurbin/airoboros-65b-gpt4-2.0](https://huggingface.co/jondurbin/airoboros-65b-gpt4-2.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jondurbin__airoboros-65b-gpt4-2.0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-23T00:06:27.390868](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-65b-gpt4-2.0/blob/main/results_2023-10-23T00-06-27.390868.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.057151845637583895,\n \"em_stderr\": 0.0023772543944610342,\n \"f1\": 0.12996224832214703,\n \"f1_stderr\": 0.002697055815084332,\n \"acc\": 0.5040710925798825,\n \"acc_stderr\": 0.011157098318797569\n },\n \"harness|drop|3\": {\n \"em\": 0.057151845637583895,\n \"em_stderr\": 0.0023772543944610342,\n \"f1\": 0.12996224832214703,\n \"f1_stderr\": 0.002697055815084332\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.20545868081880211,\n \"acc_stderr\": 0.011129170248544765\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8026835043409629,\n \"acc_stderr\": 0.011185026389050372\n }\n}\n```", "repo_url": "https://huggingface.co/jondurbin/airoboros-65b-gpt4-2.0", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|arc:challenge|25_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_22T19_00_27.180103", "path": ["**/details_harness|drop|3_2023-10-22T19-00-27.180103.parquet"]}, {"split": "2023_10_23T00_06_27.390868", "path": ["**/details_harness|drop|3_2023-10-23T00-06-27.390868.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-23T00-06-27.390868.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_22T19_00_27.180103", "path": ["**/details_harness|gsm8k|5_2023-10-22T19-00-27.180103.parquet"]}, {"split": "2023_10_23T00_06_27.390868", "path": ["**/details_harness|gsm8k|5_2023-10-23T00-06-27.390868.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-23T00-06-27.390868.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": 
"2023_08_17T18_40_33.016233", "path": ["**/details_harness|hellaswag|10_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:40:33.016233.parquet", 
"**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:40:33.016233.parquet", 
"**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:40:33.016233.parquet", 
"**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T18:40:33.016233.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": 
[{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T18:40:33.016233.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T18:40:33.016233.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_22T19_00_27.180103", "path": ["**/details_harness|winogrande|5_2023-10-22T19-00-27.180103.parquet"]}, {"split": "2023_10_23T00_06_27.390868", "path": ["**/details_harness|winogrande|5_2023-10-23T00-06-27.390868.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-23T00-06-27.390868.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T18_40_33.016233", "path": ["results_2023-08-17T18:40:33.016233.parquet"]}, {"split": "2023_10_22T19_00_27.180103", "path": ["results_2023-10-22T19-00-27.180103.parquet"]}, {"split": "2023_10_23T00_06_27.390868", "path": ["results_2023-10-23T00-06-27.390868.parquet"]}, {"split": "latest", "path": ["results_2023-10-23T00-06-27.390868.parquet"]}]}]}
2023-10-22T23:06:40+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of jondurbin/airoboros-65b-gpt4-2.0

## Dataset Description

- Homepage:
- Repository: URL
- Paper:
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model jondurbin/airoboros-65b-gpt4-2.0 on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following:

## Latest results

These are the latest results from run 2023-10-23T00:06:27.390868 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
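The loading snippet referenced in the card above was stripped when its links were replaced with "URL" placeholders. A minimal sketch of what it would look like, assuming the repo id follows the leaderboard's `details_<org>__<model>` naming pattern (the exact id is elided in the card itself) and using the `harness_winogrande_5` config and `latest` split listed in the metadata:

```python
from datasets import load_dataset

# Assumption: the repo id below is inferred from the leaderboard's naming
# convention, since the card above elides its URLs.
data = load_dataset(
    "open-llm-leaderboard/details_jondurbin__airoboros-65b-gpt4-2.0",
    "harness_winogrande_5",  # any config name from the metadata works here
    split="latest",          # or a timestamped split such as "2023_10_23T00_06_27.390868"
)
```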
[ "# Dataset Card for Evaluation run of jondurbin/airoboros-65b-gpt4-2.0", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-65b-gpt4-2.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T00:06:27.390868(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of jondurbin/airoboros-65b-gpt4-2.0", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-65b-gpt4-2.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T00:06:27.390868(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 24, 31, 172, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of jondurbin/airoboros-65b-gpt4-2.0## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-65b-gpt4-2.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-23T00:06:27.390868(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
72b65d7c6f0be219c70462031b919228b0336bdf
# Dataset Card for Evaluation run of jondurbin/airoboros-l2-13b-gpt4-2.0

## Dataset Description

- **Homepage:**
- **Repository:** https://huggingface.co/jondurbin/airoboros-l2-13b-gpt4-2.0
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [jondurbin/airoboros-l2-13b-gpt4-2.0](https://huggingface.co/jondurbin/airoboros-l2-13b-gpt4-2.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-gpt4-2.0",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-19T00:42:07.460646](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-gpt4-2.0/blob/main/results_2023-10-19T00-42-07.460646.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.3656669463087248,
        "em_stderr": 0.004932205632924282,
        "f1": 0.4289702181208073,
        "f1_stderr": 0.00478287167348305,
        "acc": 0.4096206676388381,
        "acc_stderr": 0.009827996178597372
    },
    "harness|drop|3": {
        "em": 0.3656669463087248,
        "em_stderr": 0.004932205632924282,
        "f1": 0.4289702181208073,
        "f1_stderr": 0.00478287167348305
    },
    "harness|gsm8k|5": {
        "acc": 0.07733131159969674,
        "acc_stderr": 0.00735771352322235
    },
    "harness|winogrande|5": {
        "acc": 0.7419100236779794,
        "acc_stderr": 0.012298278833972392
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
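The card's own snippet loads per-example details for one task. The aggregated numbers shown under "Latest results" live in the `results` configuration, whose `latest` split points at the most recent run (see the configs in the metadata below); a minimal sketch, assuming the same `datasets` API as the card's example:

```python
from datasets import load_dataset

# "results" is the aggregated-metrics config; its "latest" split always points
# at the most recent run (here 2023-10-19T00-42-07.460646, per the metadata below).
results = load_dataset(
    "open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-gpt4-2.0",
    "results",
    split="latest",
)
print(results[0])  # one row holding the aggregated scores
```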
open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-gpt4-2.0
[ "region:us" ]
2023-08-18T17:52:56+00:00
{"pretty_name": "Evaluation run of jondurbin/airoboros-l2-13b-gpt4-2.0", "dataset_summary": "Dataset automatically created during the evaluation run of model [jondurbin/airoboros-l2-13b-gpt4-2.0](https://huggingface.co/jondurbin/airoboros-l2-13b-gpt4-2.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-gpt4-2.0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-19T00:42:07.460646](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-gpt4-2.0/blob/main/results_2023-10-19T00-42-07.460646.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.3656669463087248,\n \"em_stderr\": 0.004932205632924282,\n \"f1\": 0.4289702181208073,\n \"f1_stderr\": 0.00478287167348305,\n \"acc\": 0.4096206676388381,\n \"acc_stderr\": 0.009827996178597372\n },\n \"harness|drop|3\": {\n \"em\": 0.3656669463087248,\n \"em_stderr\": 0.004932205632924282,\n \"f1\": 0.4289702181208073,\n \"f1_stderr\": 0.00478287167348305\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.07733131159969674,\n \"acc_stderr\": 0.00735771352322235\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7419100236779794,\n \"acc_stderr\": 0.012298278833972392\n }\n}\n```", "repo_url": "https://huggingface.co/jondurbin/airoboros-l2-13b-gpt4-2.0", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|arc:challenge|25_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_19T00_42_07.460646", "path": ["**/details_harness|drop|3_2023-10-19T00-42-07.460646.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-19T00-42-07.460646.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_19T00_42_07.460646", "path": ["**/details_harness|gsm8k|5_2023-10-19T00-42-07.460646.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-19T00-42-07.460646.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hellaswag|10_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T16:46:20.305842.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T16:46:20.305842.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T16:46:20.305842.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T16:46:20.305842.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T16:46:20.305842.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_19T00_42_07.460646", "path": ["**/details_harness|winogrande|5_2023-10-19T00-42-07.460646.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-19T00-42-07.460646.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T16_46_20.305842", "path": ["results_2023-08-17T16:46:20.305842.parquet"]}, {"split": "2023_10_19T00_42_07.460646", "path": ["results_2023-10-19T00-42-07.460646.parquet"]}, {"split": "latest", "path": ["results_2023-10-19T00-42-07.460646.parquet"]}]}]}
2023-10-18T23:42:20+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of jondurbin/airoboros-l2-13b-gpt4-2.0

## Dataset Description

- Homepage: 
- Repository: URL
- Paper: 
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model jondurbin/airoboros-l2-13b-gpt4-2.0 on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following (see the sketch after this card):

## Latest results

These are the latest results from run 2023-10-19T00:42:07.460646 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
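The loading snippet referenced in the summary above was stripped along with the URLs in this flattened copy. A minimal sketch of what it would look like, assuming this record's dataset id follows the leaderboard's `details_<org>__<model>` naming pattern used by the other records in this dump (the `harness_winogrande_5` config name comes from the metadata block above):

```python
from datasets import load_dataset

# The repo id below is an assumption, inferred from the naming pattern of
# sibling records in this dump; the config name appears in this record's metadata.
data = load_dataset(
    "open-llm-leaderboard/details_jondurbin__airoboros-l2-13b-gpt4-2.0",
    "harness_winogrande_5",
    split="train",
)
```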
[ "# Dataset Card for Evaluation run of jondurbin/airoboros-l2-13b-gpt4-2.0", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-l2-13b-gpt4-2.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-19T00:42:07.460646(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of jondurbin/airoboros-l2-13b-gpt4-2.0", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-l2-13b-gpt4-2.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-19T00:42:07.460646(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 26, 31, 174, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of jondurbin/airoboros-l2-13b-gpt4-2.0## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-l2-13b-gpt4-2.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-19T00:42:07.460646(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
355c652411ffbb1fd64bcea5ea38c7ac1e65cb4f
# Dataset Card for Evaluation run of jondurbin/airoboros-13b-gpt4

## Dataset Description

- **Homepage:** 
- **Repository:** https://huggingface.co/jondurbin/airoboros-13b-gpt4
- **Paper:** 
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** [email protected]

### Dataset Summary

Dataset automatically created during the evaluation run of model [jondurbin/airoboros-13b-gpt4](https://huggingface.co/jondurbin/airoboros-13b-gpt4) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_jondurbin__airoboros-13b-gpt4",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2023-10-23T04:00:44.911684](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-13b-gpt4/blob/main/results_2023-10-23T04-00-44.911684.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{
    "all": {
        "em": 0.014681208053691275,
        "em_stderr": 0.001231711314310859,
        "f1": 0.07406564597315451,
        "f1_stderr": 0.0017844772735649754,
        "acc": 0.4182714775789221,
        "acc_stderr": 0.009732871523024014
    },
    "harness|drop|3": {
        "em": 0.014681208053691275,
        "em_stderr": 0.001231711314310859,
        "f1": 0.07406564597315451,
        "f1_stderr": 0.0017844772735649754
    },
    "harness|gsm8k|5": {
        "acc": 0.07884761182714177,
        "acc_stderr": 0.00742339051987324
    },
    "harness|winogrande|5": {
        "acc": 0.7576953433307024,
        "acc_stderr": 0.012042352526174787
    }
}
```

### Supported Tasks and Leaderboards

[More Information Needed]

### Languages

[More Information Needed]

## Dataset Structure

### Data Instances

[More Information Needed]

### Data Fields

[More Information Needed]

### Data Splits

[More Information Needed]

## Dataset Creation

### Curation Rationale

[More Information Needed]

### Source Data

#### Initial Data Collection and Normalization

[More Information Needed]

#### Who are the source language producers?

[More Information Needed]

### Annotations

#### Annotation process

[More Information Needed]

#### Who are the annotators?

[More Information Needed]

### Personal and Sensitive Information

[More Information Needed]

## Considerations for Using the Data

### Social Impact of Dataset

[More Information Needed]

### Discussion of Biases

[More Information Needed]

### Other Known Limitations

[More Information Needed]

## Additional Information

### Dataset Curators

[More Information Needed]

### Licensing Information

[More Information Needed]

### Citation Information

[More Information Needed]

### Contributions

[More Information Needed]
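The summary above mentions an aggregated "results" configuration but only demonstrates loading a task config. A minimal sketch of pulling the aggregated metrics shown under "Latest results", using the "results" config and "latest" split names listed in this record's metadata:

```python
from datasets import load_dataset

# "results" config and "latest" split names are taken from this record's
# metadata block; "latest" points at the most recent run's aggregated metrics.
results = load_dataset(
    "open-llm-leaderboard/details_jondurbin__airoboros-13b-gpt4",
    "results",
    split="latest",
)
print(results[0])  # inspect the aggregated metrics (exact row layout is an assumption)
```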
open-llm-leaderboard/details_jondurbin__airoboros-13b-gpt4
[ "region:us" ]
2023-08-18T17:53:05+00:00
{"pretty_name": "Evaluation run of jondurbin/airoboros-13b-gpt4", "dataset_summary": "Dataset automatically created during the evaluation run of model [jondurbin/airoboros-13b-gpt4](https://huggingface.co/jondurbin/airoboros-13b-gpt4) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jondurbin__airoboros-13b-gpt4\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-23T04:00:44.911684](https://huggingface.co/datasets/open-llm-leaderboard/details_jondurbin__airoboros-13b-gpt4/blob/main/results_2023-10-23T04-00-44.911684.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.014681208053691275,\n \"em_stderr\": 0.001231711314310859,\n \"f1\": 0.07406564597315451,\n \"f1_stderr\": 0.0017844772735649754,\n \"acc\": 0.4182714775789221,\n \"acc_stderr\": 0.009732871523024014\n },\n \"harness|drop|3\": {\n \"em\": 0.014681208053691275,\n \"em_stderr\": 0.001231711314310859,\n \"f1\": 0.07406564597315451,\n \"f1_stderr\": 0.0017844772735649754\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.07884761182714177,\n \"acc_stderr\": 0.00742339051987324\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7576953433307024,\n \"acc_stderr\": 0.012042352526174787\n }\n}\n```", "repo_url": "https://huggingface.co/jondurbin/airoboros-13b-gpt4", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|arc:challenge|25_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_23T04_00_44.911684", "path": ["**/details_harness|drop|3_2023-10-23T04-00-44.911684.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-23T04-00-44.911684.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_23T04_00_44.911684", "path": ["**/details_harness|gsm8k|5_2023-10-23T04-00-44.911684.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-23T04-00-44.911684.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hellaswag|10_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", 
"data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T14:07:58.585031.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T14:07:58.585031.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T14:07:58.585031.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T14:07:58.585031.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T14:07:58.585031.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_23T04_00_44.911684", "path": ["**/details_harness|winogrande|5_2023-10-23T04-00-44.911684.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-23T04-00-44.911684.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T14_07_58.585031", "path": ["results_2023-08-18T14:07:58.585031.parquet"]}, {"split": "2023_10_23T04_00_44.911684", "path": ["results_2023-10-23T04-00-44.911684.parquet"]}, {"split": "latest", "path": ["results_2023-10-23T04-00-44.911684.parquet"]}]}]}
2023-10-23T03:00:56+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of jondurbin/airoboros-13b-gpt4

## Dataset Description

- Homepage: 
- Repository: URL
- Paper: 
- Leaderboard: URL
- Point of Contact: clementine@URL

### Dataset Summary

Dataset automatically created during the evaluation run of model jondurbin/airoboros-13b-gpt4 on the Open LLM Leaderboard.

The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following (see the restored snippet after this card):

## Latest results

These are the latest results from run 2023-10-23T04:00:44.911684 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

### Supported Tasks and Leaderboards

### Languages

## Dataset Structure

### Data Instances

### Data Fields

### Data Splits

## Dataset Creation

### Curation Rationale

### Source Data

#### Initial Data Collection and Normalization

#### Who are the source language producers?

### Annotations

#### Annotation process

#### Who are the annotators?

### Personal and Sensitive Information

## Considerations for Using the Data

### Social Impact of Dataset

### Discussion of Biases

### Other Known Limitations

## Additional Information

### Dataset Curators

### Licensing Information

### Contributions
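The loading snippet was stripped along with the URLs in this flattened copy; restored here from the full card earlier in this record:

```python
from datasets import load_dataset

# Verbatim from the full card above: load the winogrande details from the latest run.
data = load_dataset("open-llm-leaderboard/details_jondurbin__airoboros-13b-gpt4",
	"harness_winogrande_5",
	split="train")
```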
[ "# Dataset Card for Evaluation run of jondurbin/airoboros-13b-gpt4", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-13b-gpt4 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T04:00:44.911684(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of jondurbin/airoboros-13b-gpt4", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-13b-gpt4 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T04:00:44.911684(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 22, 31, 170, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of jondurbin/airoboros-13b-gpt4## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model jondurbin/airoboros-13b-gpt4 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-23T04:00:44.911684(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
6ff3c95ff6dcdcdd1159bf6c3797c9675113395e
# Dataset Card for Evaluation run of TheBloke/robin-65b-v2-fp16 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/TheBloke/robin-65b-v2-fp16 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [TheBloke/robin-65b-v2-fp16](https://huggingface.co/TheBloke/robin-65b-v2-fp16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TheBloke__robin-65b-v2-fp16", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-23T10:30:00.008059](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__robin-65b-v2-fp16/blob/main/results_2023-10-23T10-30-00.008059.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.002202181208053691, "em_stderr": 0.00048005108166193297, "f1": 0.064190436241611, "f1_stderr": 0.001385342539630455, "acc": 0.5374763713870437, "acc_stderr": 0.011680771136203586 }, "harness|drop|3": { "em": 0.002202181208053691, "em_stderr": 0.00048005108166193297, "f1": 0.064190436241611, "f1_stderr": 0.001385342539630455 }, "harness|gsm8k|5": { "acc": 0.2699014404852161, "acc_stderr": 0.012227442856468897 }, "harness|winogrande|5": { "acc": 0.8050513022888713, "acc_stderr": 0.011134099415938275 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
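The card above shows how to load a single task configuration. As a complementary sketch (not part of the original card), the aggregated "results" configuration and its "latest" split, both listed in this record's config metadata below, can be loaded the same way; the exact column layout of the returned table is an assumption and may vary between runs:

```python
from datasets import load_dataset

# Hedged sketch: load the aggregated "results" configuration named in the
# card above. The "latest" split name is taken from this record's config
# metadata; the column layout is an assumption and may differ per run.
results = load_dataset(
    "open-llm-leaderboard/details_TheBloke__robin-65b-v2-fp16",
    "results",
    split="latest",
)
print(results.column_names)
```

Loading "results" rather than a per-task configuration is useful when only the aggregated metrics shown under "Latest results" are needed.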
open-llm-leaderboard/details_TheBloke__robin-65b-v2-fp16
[ "region:us" ]
2023-08-18T17:53:21+00:00
{"pretty_name": "Evaluation run of TheBloke/robin-65b-v2-fp16", "dataset_summary": "Dataset automatically created during the evaluation run of model [TheBloke/robin-65b-v2-fp16](https://huggingface.co/TheBloke/robin-65b-v2-fp16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TheBloke__robin-65b-v2-fp16\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-10-23T10:30:00.008059](https://huggingface.co/datasets/open-llm-leaderboard/details_TheBloke__robin-65b-v2-fp16/blob/main/results_2023-10-23T10-30-00.008059.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.002202181208053691,\n \"em_stderr\": 0.00048005108166193297,\n \"f1\": 0.064190436241611,\n \"f1_stderr\": 0.001385342539630455,\n \"acc\": 0.5374763713870437,\n \"acc_stderr\": 0.011680771136203586\n },\n \"harness|drop|3\": {\n \"em\": 0.002202181208053691,\n \"em_stderr\": 0.00048005108166193297,\n \"f1\": 0.064190436241611,\n \"f1_stderr\": 0.001385342539630455\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.2699014404852161,\n \"acc_stderr\": 0.012227442856468897\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8050513022888713,\n \"acc_stderr\": 0.011134099415938275\n }\n}\n```", "repo_url": "https://huggingface.co/TheBloke/robin-65b-v2-fp16", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|arc:challenge|25_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_10_23T10_30_00.008059", "path": ["**/details_harness|drop|3_2023-10-23T10-30-00.008059.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-10-23T10-30-00.008059.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_10_23T10_30_00.008059", "path": ["**/details_harness|gsm8k|5_2023-10-23T10-30-00.008059.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-10-23T10-30-00.008059.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hellaswag|10_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": 
[{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T22:09:59.169977.parquet", 
"**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T22:09:59.169977.parquet", 
"**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-17T22:09:59.169977.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", 
"data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", 
"path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": 
["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": 
"2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T22:09:59.169977.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-17T22:09:59.169977.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_10_23T10_30_00.008059", "path": ["**/details_harness|winogrande|5_2023-10-23T10-30-00.008059.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-10-23T10-30-00.008059.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_17T22_09_59.169977", "path": ["results_2023-08-17T22:09:59.169977.parquet"]}, {"split": "2023_10_23T10_30_00.008059", "path": ["results_2023-10-23T10-30-00.008059.parquet"]}, {"split": "latest", "path": ["results_2023-10-23T10-30-00.008059.parquet"]}]}]}
2023-10-23T09:30:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of TheBloke/robin-65b-v2-fp16 ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model TheBloke/robin-65b-v2-fp16 on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-10-23T10:30:00.008059 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of TheBloke/robin-65b-v2-fp16", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model TheBloke/robin-65b-v2-fp16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T10:30:00.008059(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of TheBloke/robin-65b-v2-fp16", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model TheBloke/robin-65b-v2-fp16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-10-23T10:30:00.008059(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 25, 31, 173, 67, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of TheBloke/robin-65b-v2-fp16## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model TheBloke/robin-65b-v2-fp16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-10-23T10:30:00.008059(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
a722d0a9eae505827ae90aaaf68c173d456b2641
# Dataset Card for Evaluation run of timdettmers/guanaco-65b-merged ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/timdettmers/guanaco-65b-merged - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [timdettmers/guanaco-65b-merged](https://huggingface.co/timdettmers/guanaco-65b-merged) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_timdettmers__guanaco-65b-merged", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-08-18T00:17:34.582006](https://huggingface.co/datasets/open-llm-leaderboard/details_timdettmers__guanaco-65b-merged/blob/main/results_2023-08-18T00%3A17%3A34.582006.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.25104389504062485, "acc_stderr": 0.030647487837110618, "acc_norm": 0.2523346329049775, "acc_norm_stderr": 0.030669736900925226, "mc1": 0.24112607099143207, "mc1_stderr": 0.014974827279752346, "mc2": 0.4840947451540454, "mc2_stderr": 0.016324348732205056 }, "harness|arc:challenge|25": { "acc": 0.2030716723549488, "acc_stderr": 0.011755899303705582, "acc_norm": 0.27474402730375425, "acc_norm_stderr": 0.013044617212771227 }, "harness|hellaswag|10": { "acc": 0.2615016928898626, "acc_stderr": 0.004385544487143912, "acc_norm": 0.26598287193786097, "acc_norm_stderr": 0.004409521343140112 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.18518518518518517, "acc_stderr": 0.03355677216313142, "acc_norm": 0.18518518518518517, "acc_norm_stderr": 0.03355677216313142 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21509433962264152, "acc_stderr": 0.02528839450289137, "acc_norm": 0.21509433962264152, "acc_norm_stderr": 0.02528839450289137 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845,
"acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749874, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749874 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102973, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102973 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.20899470899470898, "acc_stderr": 0.02094048156533486, "acc_norm": 0.20899470899470898, "acc_norm_stderr": 0.02094048156533486 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04040610178208841, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04040610178208841 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.1774193548387097, "acc_stderr": 0.02173254068932927, "acc_norm": 0.1774193548387097, "acc_norm_stderr": 0.02173254068932927 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.15270935960591134, "acc_stderr": 0.02530890453938063, "acc_norm": 0.15270935960591134, "acc_norm_stderr": 0.02530890453938063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.032876667586034906, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.032876667586034906 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.17676767676767677, "acc_stderr": 0.027178752639044915, "acc_norm": 0.17676767676767677, "acc_norm_stderr": 0.027178752639044915 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.19689119170984457, "acc_stderr": 0.028697873971860664, "acc_norm": 0.19689119170984457, "acc_norm_stderr": 0.028697873971860664 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.20256410256410257, "acc_stderr": 0.020377660970371372, "acc_norm": 0.20256410256410257, "acc_norm_stderr": 0.020377660970371372 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2111111111111111, "acc_stderr": 0.024882116857655075, "acc_norm": 0.2111111111111111, "acc_norm_stderr": 0.024882116857655075 }, "harness|hendrycksTest-high_school_microeconomics|5": 
{ "acc": 0.21008403361344538, "acc_stderr": 0.026461398717471874, "acc_norm": 0.21008403361344538, "acc_norm_stderr": 0.026461398717471874 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.1986754966887417, "acc_stderr": 0.03257847384436776, "acc_norm": 0.1986754966887417, "acc_norm_stderr": 0.03257847384436776 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.1926605504587156, "acc_stderr": 0.016909276884936094, "acc_norm": 0.1926605504587156, "acc_norm_stderr": 0.016909276884936094 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.1527777777777778, "acc_stderr": 0.024536326026134224, "acc_norm": 0.1527777777777778, "acc_norm_stderr": 0.024536326026134224 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8382352941176471, "acc_stderr": 0.02584501798692692, "acc_norm": 0.8382352941176471, "acc_norm_stderr": 0.02584501798692692 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.2742616033755274, "acc_stderr": 0.029041333510598035, "acc_norm": 0.2742616033755274, "acc_norm_stderr": 0.029041333510598035 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.31390134529147984, "acc_stderr": 0.031146796482972465, "acc_norm": 0.31390134529147984, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070417, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25925925925925924, "acc_stderr": 0.042365112580946336, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.042365112580946336 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.032591773927421776, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.032591773927421776 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2948717948717949, "acc_stderr": 0.029872577708891148, "acc_norm": 0.2948717948717949, "acc_norm_stderr": 0.029872577708891148 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.23754789272030652, "acc_stderr": 0.015218733046150193, "acc_norm": 0.23754789272030652, "acc_norm_stderr": 0.015218733046150193 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24855491329479767, "acc_stderr": 0.023267528432100174, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.023267528432100174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.22549019607843138, "acc_stderr": 0.023929155517351284, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.023929155517351284 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.1864951768488746, "acc_stderr": 0.02212243977248077, "acc_norm": 0.1864951768488746, 
"acc_norm_stderr": 0.02212243977248077 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.21604938271604937, "acc_stderr": 0.022899162918445806, "acc_norm": 0.21604938271604937, "acc_norm_stderr": 0.022899162918445806 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432417, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432417 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.26792698826597133, "acc_stderr": 0.011311347690633881, "acc_norm": 0.26792698826597133, "acc_norm_stderr": 0.011311347690633881 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.18382352941176472, "acc_stderr": 0.023529242185193106, "acc_norm": 0.18382352941176472, "acc_norm_stderr": 0.023529242185193106 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03955932861795833, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03955932861795833 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.18775510204081633, "acc_stderr": 0.02500025603954621, "acc_norm": 0.18775510204081633, "acc_norm_stderr": 0.02500025603954621 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.03036049015401465, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.03036049015401465 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 0.24112607099143207, "mc1_stderr": 0.014974827279752346, "mc2": 0.4840947451540454, "mc2_stderr": 0.016324348732205056 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_timdettmers__guanaco-65b-merged
[ "region:us" ]
2023-08-18T17:53:30+00:00
{"pretty_name": "Evaluation run of timdettmers/guanaco-65b-merged", "dataset_summary": "Dataset automatically created during the evaluation run of model [timdettmers/guanaco-65b-merged](https://huggingface.co/timdettmers/guanaco-65b-merged) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_timdettmers__guanaco-65b-merged\",\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-08-18T00:17:34.582006](https://huggingface.co/datasets/open-llm-leaderboard/details_timdettmers__guanaco-65b-merged/blob/main/results_2023-08-18T00%3A17%3A34.582006.json) (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.25104389504062485,\n \"acc_stderr\": 0.030647487837110618,\n \"acc_norm\": 0.2523346329049775,\n \"acc_norm_stderr\": 0.030669736900925226,\n \"mc1\": 0.24112607099143207,\n \"mc1_stderr\": 0.014974827279752346,\n \"mc2\": 0.4840947451540454,\n \"mc2_stderr\": 0.016324348732205056\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.2030716723549488,\n \"acc_stderr\": 0.011755899303705582,\n \"acc_norm\": 0.27474402730375425,\n \"acc_norm_stderr\": 0.013044617212771227\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2615016928898626,\n \"acc_stderr\": 0.004385544487143912,\n \"acc_norm\": 0.26598287193786097,\n \"acc_norm_stderr\": 0.004409521343140112\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.18518518518518517,\n \"acc_stderr\": 0.03355677216313142,\n \"acc_norm\": 0.18518518518518517,\n \"acc_norm_stderr\": 0.03355677216313142\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.21509433962264152,\n \"acc_stderr\": 0.02528839450289137,\n \"acc_norm\": 0.21509433962264152,\n \"acc_norm_stderr\": 0.02528839450289137\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 
0.04020151261036845,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n \"acc_stderr\": 0.030952890217749874,\n \"acc_norm\": 0.20809248554913296,\n \"acc_norm_stderr\": 0.030952890217749874\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.039994238792813365,\n \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.039994238792813365\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135302,\n \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135302\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.20899470899470898,\n \"acc_stderr\": 0.02094048156533486,\n \"acc_norm\": 0.20899470899470898,\n \"acc_norm_stderr\": 0.02094048156533486\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.04040610178208841,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.04040610178208841\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.1774193548387097,\n \"acc_stderr\": 0.02173254068932927,\n \"acc_norm\": 0.1774193548387097,\n \"acc_norm_stderr\": 0.02173254068932927\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.15270935960591134,\n \"acc_stderr\": 0.02530890453938063,\n \"acc_norm\": 0.15270935960591134,\n \"acc_norm_stderr\": 0.02530890453938063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.032876667586034906,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.032876667586034906\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.17676767676767677,\n \"acc_stderr\": 0.027178752639044915,\n \"acc_norm\": 0.17676767676767677,\n \"acc_norm_stderr\": 0.027178752639044915\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.19689119170984457,\n \"acc_stderr\": 0.028697873971860664,\n \"acc_norm\": 0.19689119170984457,\n \"acc_norm_stderr\": 0.028697873971860664\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.20256410256410257,\n \"acc_stderr\": 0.020377660970371372,\n \"acc_norm\": 0.20256410256410257,\n \"acc_norm_stderr\": 0.020377660970371372\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2111111111111111,\n \"acc_stderr\": 0.024882116857655075,\n \"acc_norm\": 0.2111111111111111,\n \"acc_norm_stderr\": 0.024882116857655075\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.1986754966887417,\n \"acc_stderr\": 0.03257847384436776,\n \"acc_norm\": 0.1986754966887417,\n \"acc_norm_stderr\": 0.03257847384436776\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.1926605504587156,\n \"acc_stderr\": 0.016909276884936094,\n \"acc_norm\": 0.1926605504587156,\n \"acc_norm_stderr\": 0.016909276884936094\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.1527777777777778,\n \"acc_stderr\": 0.024536326026134224,\n \"acc_norm\": 0.1527777777777778,\n \"acc_norm_stderr\": 0.024536326026134224\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8382352941176471,\n \"acc_stderr\": 0.02584501798692692,\n \"acc_norm\": 0.8382352941176471,\n \"acc_norm_stderr\": 0.02584501798692692\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.2742616033755274,\n \"acc_stderr\": 0.029041333510598035,\n \"acc_norm\": 0.2742616033755274,\n \"acc_norm_stderr\": 0.029041333510598035\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.31390134529147984,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.31390134529147984,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.042365112580946336,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.042365112580946336\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2948717948717949,\n \"acc_stderr\": 0.029872577708891148,\n \"acc_norm\": 0.2948717948717949,\n \"acc_norm_stderr\": 0.029872577708891148\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.23754789272030652,\n \"acc_stderr\": 0.015218733046150193,\n \"acc_norm\": 0.23754789272030652,\n \"acc_norm_stderr\": 0.015218733046150193\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.023267528432100174,\n \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.023267528432100174\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.023929155517351284,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.023929155517351284\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.1864951768488746,\n \"acc_stderr\": 0.02212243977248077,\n \"acc_norm\": 0.1864951768488746,\n \"acc_norm_stderr\": 0.02212243977248077\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.21604938271604937,\n \"acc_stderr\": 0.022899162918445806,\n \"acc_norm\": 0.21604938271604937,\n \"acc_norm_stderr\": 0.022899162918445806\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.23404255319148937,\n \"acc_stderr\": 0.025257861359432417,\n \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.025257861359432417\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.26792698826597133,\n \"acc_stderr\": 0.011311347690633881,\n \"acc_norm\": 0.26792698826597133,\n \"acc_norm_stderr\": 0.011311347690633881\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.023529242185193106,\n \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.023529242185193106\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03955932861795833,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03955932861795833\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.18775510204081633,\n \"acc_stderr\": 0.02500025603954621,\n \"acc_norm\": 0.18775510204081633,\n \"acc_norm_stderr\": 0.02500025603954621\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.03036049015401465,\n \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.03036049015401465\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.28313253012048195,\n \"acc_stderr\": 0.03507295431370518,\n \"acc_norm\": 0.28313253012048195,\n \"acc_norm_stderr\": 0.03507295431370518\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n \"acc_stderr\": 0.03582529442573122,\n \"acc_norm\": 0.3216374269005848,\n \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.24112607099143207,\n \"mc1_stderr\": 0.014974827279752346,\n \"mc2\": 0.4840947451540454,\n \"mc2_stderr\": 0.016324348732205056\n }\n}\n```", "repo_url": "https://huggingface.co/timdettmers/guanaco-65b-merged", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", 
"configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|arc:challenge|25_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hellaswag|10_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T00:17:34.582006.parquet", 
"**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T00:17:34.582006.parquet", 
"**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T00:17:34.582006.parquet", 
"**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T00:17:34.582006.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T00:17:34.582006.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T00_17_34.582006", "path": ["results_2023-08-18T00:17:34.582006.parquet"]}, {"split": "latest", "path": ["results_2023-08-18T00:17:34.582006.parquet"]}]}]}
2023-08-27T11:41:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of timdettmers/guanaco-65b-merged ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model timdettmers/guanaco-65b-merged on the Open LLM Leaderboard. The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-08-18T00:17:34.582006 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of timdettmers/guanaco-65b-merged", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model timdettmers/guanaco-65b-merged on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-18T00:17:34.582006 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of timdettmers/guanaco-65b-merged", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model timdettmers/guanaco-65b-merged on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-08-18T00:17:34.582006 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 23, 31, 171, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of timdettmers/guanaco-65b-merged## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model timdettmers/guanaco-65b-merged on the Open LLM Leaderboard.\n\nThe dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-08-18T00:17:34.582006 (note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
42117dac86eb2ea5a252199bef1183dbeffb6e71
# Dataset Card for Evaluation run of openthaigpt/openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/openthaigpt/openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** [email protected] ### Dataset Summary Dataset automatically created during the evaluation run of model [openthaigpt/openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf](https://huggingface.co/openthaigpt/openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_openthaigpt__openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-22T23:15:18.463104](https://huggingface.co/datasets/open-llm-leaderboard/details_openthaigpt__openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf/blob/main/results_2023-09-22T23-15-18.463104.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.031774328859060404, "em_stderr": 0.0017962473521312393, "f1": 0.08420092281879202, "f1_stderr": 0.0021474530604162255, "acc": 0.3646366953032391, "acc_stderr": 0.00915095624646051 }, "harness|drop|3": { "em": 0.031774328859060404, "em_stderr": 0.0017962473521312393, "f1": 0.08420092281879202, "f1_stderr": 0.0021474530604162255 }, "harness|gsm8k|5": { "acc": 0.03866565579984837, "acc_stderr": 0.005310583162098024 }, "harness|winogrande|5": { "acc": 0.6906077348066298, "acc_stderr": 0.012991329330822995 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_openthaigpt__openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf
[ "region:us" ]
2023-08-18T17:53:40+00:00
{"pretty_name": "Evaluation run of openthaigpt/openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf", "dataset_summary": "Dataset automatically created during the evaluation run of model [openthaigpt/openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf](https://huggingface.co/openthaigpt/openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_openthaigpt__openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2023-09-22T23:15:18.463104](https://huggingface.co/datasets/open-llm-leaderboard/details_openthaigpt__openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf/blob/main/results_2023-09-22T23-15-18.463104.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.031774328859060404,\n \"em_stderr\": 0.0017962473521312393,\n \"f1\": 0.08420092281879202,\n \"f1_stderr\": 0.0021474530604162255,\n \"acc\": 0.3646366953032391,\n \"acc_stderr\": 0.00915095624646051\n },\n \"harness|drop|3\": {\n \"em\": 0.031774328859060404,\n \"em_stderr\": 0.0017962473521312393,\n \"f1\": 0.08420092281879202,\n \"f1_stderr\": 0.0021474530604162255\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.03866565579984837,\n \"acc_stderr\": 0.005310583162098024\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6906077348066298,\n \"acc_stderr\": 0.012991329330822995\n }\n}\n```", "repo_url": "https://huggingface.co/openthaigpt/openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|arc:challenge|25_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_drop_3", "data_files": [{"split": "2023_09_22T23_15_18.463104", "path": ["**/details_harness|drop|3_2023-09-22T23-15-18.463104.parquet"]}, {"split": "latest", "path": ["**/details_harness|drop|3_2023-09-22T23-15-18.463104.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2023_09_22T23_15_18.463104", "path": ["**/details_harness|gsm8k|5_2023-09-22T23-15-18.463104.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2023-09-22T23-15-18.463104.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hellaswag|10_2023-08-18T12:43:45.904593.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hellaswag|10_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T12:43:45.904593.parquet", 
"**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-world_religions|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-anatomy|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-astronomy|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-college_biology|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-college_physics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-computer_security|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-econometrics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-global_facts|5_2023-08-18T12:43:45.904593.parquet", 
"**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-human_aging|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-international_law|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-management|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-marketing|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-nutrition|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-philosophy|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-prehistory|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-professional_law|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-public_relations|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-security_studies|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-sociology|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T12:43:45.904593.parquet", "**/details_harness|hendrycksTest-virology|5_2023-08-18T12:43:45.904593.parquet", 
"**/details_harness|hendrycksTest-world_religions|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": 
"2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": 
["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": 
["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_psychology|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T12:43:45.904593.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2023-08-18T12:43:45.904593.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2023_09_22T23_15_18.463104", "path": ["**/details_harness|winogrande|5_2023-09-22T23-15-18.463104.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2023-09-22T23-15-18.463104.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2023_08_18T12_43_45.904593", "path": ["results_2023-08-18T12:43:45.904593.parquet"]}, {"split": "2023_09_22T23_15_18.463104", "path": ["results_2023-09-22T23-15-18.463104.parquet"]}, {"split": "latest", "path": ["results_2023-09-22T23-15-18.463104.parquet"]}]}]}
2023-09-22T22:15:30+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of openthaigpt/openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf ## Dataset Description - Homepage: - Repository: URL - Paper: - Leaderboard: URL - Point of Contact: clementine@URL ### Dataset Summary Dataset automatically created during the evaluation run of model openthaigpt/openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf on the Open LLM Leaderboard. The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2023-09-22T23:15:18.463104 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval): ### Supported Tasks and Leaderboards ### Languages ## Dataset Structure ### Data Instances ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? ### Annotations #### Annotation process #### Who are the annotators? ### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information ### Contributions
[ "# Dataset Card for Evaluation run of openthaigpt/openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model openthaigpt/openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-22T23:15:18.463104(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of openthaigpt/openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf", "## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL", "### Dataset Summary\n\nDataset automatically created during the evaluation run of model openthaigpt/openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2023-09-22T23:15:18.463104(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "### Supported Tasks and Leaderboards", "### Languages", "## Dataset Structure", "### Data Instances", "### Data Fields", "### Data Splits", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Initial Data Collection and Normalization", "#### Who are the source language producers?", "### Annotations", "#### Annotation process", "#### Who are the annotators?", "### Personal and Sensitive Information", "## Considerations for Using the Data", "### Social Impact of Dataset", "### Discussion of Biases", "### Other Known Limitations", "## Additional Information", "### Dataset Curators", "### Licensing Information", "### Contributions" ]
[ 6, 34, 31, 182, 66, 10, 4, 6, 6, 5, 5, 5, 7, 4, 10, 10, 5, 5, 9, 8, 8, 7, 8, 7, 5, 6, 6, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of openthaigpt/openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf## Dataset Description\n\n- Homepage: \n- Repository: URL\n- Paper: \n- Leaderboard: URL\n- Point of Contact: clementine@URL### Dataset Summary\n\nDataset automatically created during the evaluation run of model openthaigpt/openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf on the Open LLM Leaderboard.\n\nThe dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2023-09-22T23:15:18.463104(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):### Supported Tasks and Leaderboards### Languages## Dataset Structure### Data Instances### Data Fields### Data Splits## Dataset Creation### Curation Rationale### Source Data#### Initial Data Collection and Normalization#### Who are the source language producers?### Annotations#### Annotation process#### Who are the annotators?### Personal and Sensitive Information## Considerations for Using the Data### Social Impact of Dataset### Discussion of Biases### Other Known Limitations## Additional Information### Dataset Curators### Licensing Information### Contributions" ]
d4d18e4e0216a0f48d0c370df95c859e84178568
# Dataset of aki_minoriko/あきみのりこ/秋穣子 (Touhou) This is the dataset of aki_minoriko/あきみのりこ/秋穣子 (Touhou), containing 500 images and their tags. The core tags of this character are `blonde_hair, short_hair, hat, red_eyes, mob_cap, red_headwear, breasts`, which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)). ## List of Packages | Name | Images | Size | Download | Type | Description | |:-----------------|---------:|:-----------|:---------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------| | raw | 500 | 519.30 MiB | [Download](https://huggingface.co/datasets/CyberHarem/aki_minoriko_touhou/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). | | 800 | 500 | 359.35 MiB | [Download](https://huggingface.co/datasets/CyberHarem/aki_minoriko_touhou/resolve/main/dataset-800.zip) | IMG+TXT | Dataset with the shorter side not exceeding 800 pixels. | | stage3-p480-800 | 1091 | 702.48 MiB | [Download](https://huggingface.co/datasets/CyberHarem/aki_minoriko_touhou/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | | 1200 | 500 | 483.88 MiB | [Download](https://huggingface.co/datasets/CyberHarem/aki_minoriko_touhou/resolve/main/dataset-1200.zip) | IMG+TXT | Dataset with the shorter side not exceeding 1200 pixels. | | stage3-p480-1200 | 1091 | 893.07 MiB | [Download](https://huggingface.co/datasets/CyberHarem/aki_minoriko_touhou/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. | ### Load Raw Dataset with Waifuc We provide the raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need it, just run the following code: ```python import os import zipfile from huggingface_hub import hf_hub_download from waifuc.source import LocalSource # download raw archive file zip_file = hf_hub_download( repo_id='CyberHarem/aki_minoriko_touhou', repo_type='dataset', filename='dataset-raw.zip', ) # extract files to your directory dataset_dir = 'dataset_dir' os.makedirs(dataset_dir, exist_ok=True) with zipfile.ZipFile(zip_file, 'r') as zf: zf.extractall(dataset_dir) # load the dataset with waifuc source = LocalSource(dataset_dir) for item in source: print(item.image, item.meta['filename'], item.meta['tags']) ``` ## List of Clusters List of tag clustering results; some outfits may be mined here.
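As a concrete example of such mining: once the raw archive has been extracted with the loading code above, the local source can be filtered for an outfit's tags — here the autumn-leaf look of cluster #3 in the tables below. This is a minimal sketch; it assumes `item.meta['tags']` is an iterable of tag names (if it is a tag-to-score mapping, `set()` will still take its keys):

```python
from waifuc.source import LocalSource

# Tags characterising cluster #3 in the tables below (autumn-leaf outfit).
wanted = {'leaf_hair_ornament', 'autumn_leaves', 'maple_leaf'}

# 'dataset_dir' is the directory created by the extraction step above.
source = LocalSource('dataset_dir')
for item in source:
    tags = set(item.meta['tags'])  # keys are taken if tags is a mapping
    if wanted <= tags:
        print(item.meta['filename'])
```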
### Raw Text Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 0 | 10 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | 1girl, grapes, long_sleeves, solo, black_skirt, open_mouth, red_apron, yellow_shirt, hat_ornament, looking_at_viewer, wide_sleeves, smile, black_ribbon, blush, barefoot, full_body, orange_eyes, simple_background, white_background | | 1 | 7 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | 1girl, grapes, smile, solo, wide_sleeves, dress, apron, long_sleeves | | 2 | 26 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | 1girl, grapes, solo, smile, open_mouth, dress | | 3 | 7 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | 1girl, leaf_hair_ornament, solo, yellow_eyes, autumn_leaves, long_sleeves, looking_at_viewer, maple_leaf, smile, bangs, red_skirt, tree, collared_shirt, open_mouth | | 4 | 5 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | 1girl, leaf_hair_ornament, solo, yellow_eyes, smile, blush, skirt | | 5 | 10 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | 1girl, blush, grapes, large_breasts, nipples, looking_at_viewer, solo, completely_nude, collarbone, huge_breasts, smile, closed_mouth, hair_between_eyes, heart, navel, open_mouth, simple_background | | 6 | 8 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | 1boy, 1girl, blush, hetero, large_breasts, navel, nipples, open_mouth, penis, solo_focus, sweat, vaginal, looking_at_viewer, food-themed_hair_ornament, grapes, pov, spread_legs, bangs, bar_censor, on_back, smile, bed_sheet, cum_in_pussy, happy_sex, missionary, on_bed, pillow, completely_nude, heart-shaped_pupils, open_clothes | | 7 | 12 | ![](samples/7/clu7-sample0.png) | ![](samples/7/clu7-sample1.png) | ![](samples/7/clu7-sample2.png) | ![](samples/7/clu7-sample3.png) | ![](samples/7/clu7-sample4.png) | 1boy, 1girl, grapes, hetero, penis, solo_focus, blush, paizuri, large_breasts, smile, cum_on_breasts, nipples, bar_censor, facial, looking_at_viewer, open_mouth, pov | ### Table Version | # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | grapes | long_sleeves | solo | black_skirt | open_mouth | red_apron | yellow_shirt | hat_ornament | looking_at_viewer | wide_sleeves | smile 
| black_ribbon | blush | barefoot | full_body | orange_eyes | simple_background | white_background | dress | apron | leaf_hair_ornament | yellow_eyes | autumn_leaves | maple_leaf | bangs | red_skirt | tree | collared_shirt | skirt | large_breasts | nipples | completely_nude | collarbone | huge_breasts | closed_mouth | hair_between_eyes | heart | navel | 1boy | hetero | penis | solo_focus | sweat | vaginal | food-themed_hair_ornament | pov | spread_legs | bar_censor | on_back | bed_sheet | cum_in_pussy | happy_sex | missionary | on_bed | pillow | heart-shaped_pupils | open_clothes | paizuri | cum_on_breasts | facial | |----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:---------|:---------------|:-------|:--------------|:-------------|:------------|:---------------|:---------------|:--------------------|:---------------|:--------|:---------------|:--------|:-----------|:------------|:--------------|:--------------------|:-------------------|:--------|:--------|:---------------------|:--------------|:----------------|:-------------|:--------|:------------|:-------|:-----------------|:--------|:----------------|:----------|:------------------|:-------------|:---------------|:---------------|:--------------------|:--------|:--------|:-------|:---------|:--------|:-------------|:--------|:----------|:----------------------------|:------|:--------------|:-------------|:----------|:------------|:---------------|:------------|:-------------|:---------|:---------|:----------------------|:---------------|:----------|:-----------------|:---------| | 0 | 10 | ![](samples/0/clu0-sample0.png) | ![](samples/0/clu0-sample1.png) | ![](samples/0/clu0-sample2.png) | ![](samples/0/clu0-sample3.png) | ![](samples/0/clu0-sample4.png) | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 1 | 7 | ![](samples/1/clu1-sample0.png) | ![](samples/1/clu1-sample1.png) | ![](samples/1/clu1-sample2.png) | ![](samples/1/clu1-sample3.png) | ![](samples/1/clu1-sample4.png) | X | X | X | X | | | | | | | X | X | | | | | | | | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 2 | 26 | ![](samples/2/clu2-sample0.png) | ![](samples/2/clu2-sample1.png) | ![](samples/2/clu2-sample2.png) | ![](samples/2/clu2-sample3.png) | ![](samples/2/clu2-sample4.png) | X | X | | X | | X | | | | | | X | | | | | | | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 3 | 7 | ![](samples/3/clu3-sample0.png) | ![](samples/3/clu3-sample1.png) | ![](samples/3/clu3-sample2.png) | ![](samples/3/clu3-sample3.png) | ![](samples/3/clu3-sample4.png) | X | | X | X | | X | | | | X | | X | | | | | | | | | | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 4 | 5 | ![](samples/4/clu4-sample0.png) | ![](samples/4/clu4-sample1.png) | ![](samples/4/clu4-sample2.png) | ![](samples/4/clu4-sample3.png) | ![](samples/4/clu4-sample4.png) | X | | | X | | | | | | | | X | | X | | | | | | | | X | X | | | | | | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | 5 | 10 | ![](samples/5/clu5-sample0.png) | ![](samples/5/clu5-sample1.png) | ![](samples/5/clu5-sample2.png) | ![](samples/5/clu5-sample3.png) | ![](samples/5/clu5-sample4.png) | X | X | | X | | X | | | | X | | X | | X | 
| | | X | | | | | | | | | | | | | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | 6 | 8 | ![](samples/6/clu6-sample0.png) | ![](samples/6/clu6-sample1.png) | ![](samples/6/clu6-sample2.png) | ![](samples/6/clu6-sample3.png) | ![](samples/6/clu6-sample4.png) | X | X | | | | X | | | | X | | X | | X | | | | | | | | | | | | X | | | | | X | X | X | | | | | | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | 7 | 12 | ![](samples/7/clu7-sample0.png) | ![](samples/7/clu7-sample1.png) | ![](samples/7/clu7-sample2.png) | ![](samples/7/clu7-sample3.png) | ![](samples/7/clu7-sample4.png) | X | X | | | | X | | | | X | | X | | X | | | | | | | | | | | | | | | | | X | X | | | | | | | | X | X | X | X | | | | X | | X | | | | | | | | | | X | X | X |
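The fixed-resolution packages from the List of Packages table can be fetched with the same `hf_hub_download` pattern as the raw archive. A short sketch, assuming `dataset-800.zip` unpacks into paired image and `.txt` tag files as its IMG+TXT type suggests:

```python
import os
import zipfile

from huggingface_hub import hf_hub_download

# Fetch the 800px IMG+TXT package listed in the table above.
zip_file = hf_hub_download(
    repo_id='CyberHarem/aki_minoriko_touhou',
    repo_type='dataset',
    filename='dataset-800.zip',
)

# Unpack it next to the raw dataset.
out_dir = 'dataset_800'
os.makedirs(out_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(out_dir)

# Assumed layout: each image ships with a same-named .txt file of tags.
print(sorted(os.listdir(out_dir))[:10])
```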
CyberHarem/aki_minoriko_touhou
[ "task_categories:text-to-image", "size_categories:n<1K", "license:mit", "art", "not-for-all-audiences", "region:us" ]
2023-08-18T17:53:47+00:00
{"license": "mit", "size_categories": ["n<1K"], "task_categories": ["text-to-image"], "tags": ["art", "not-for-all-audiences"]}
2024-01-14T19:04:48+00:00
[]
[]
TAGS #task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us
Dataset of aki\_minoriko/あきみのりこ/秋穣子 (Touhou) ============================================ This is the dataset of aki\_minoriko/あきみのりこ/秋穣子 (Touhou), containing 500 images and their tags. The core tags of this character are 'blonde\_hair, short\_hair, hat, red\_eyes, mob\_cap, red\_headwear, breasts', which are pruned in this dataset. Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...); the auto-crawling system is powered by DeepGHS Team (huggingface organization). List of Packages ---------------- ### Load Raw Dataset with Waifuc We provide the raw dataset (including tagged images) for waifuc loading. If you need it, just run the following code: List of Clusters ---------------- List of tag clustering results; some outfits may be mined here. ### Raw Text Version ### Table Version
[ "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ "TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n", "### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.", "### Raw Text Version", "### Table Version" ]
[ 44, 61, 5, 4 ]
[ "passage: TAGS\n#task_categories-text-to-image #size_categories-n<1K #license-mit #art #not-for-all-audiences #region-us \n### Load Raw Dataset with Waifuc\n\n\nWe provide raw dataset (including tagged images) for waifuc loading. If you need this, just run the following code\n\n\nList of Clusters\n----------------\n\n\nList of tag clustering result, maybe some outfits can be mined here.### Raw Text Version### Table Version" ]